
Commit

Add page-size option
AlyoshaVasilieva committed Jul 25, 2024
1 parent b0b6179 commit 1c271a9
Showing 2 changed files with 14 additions and 9 deletions.
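In short, the commit adds a --page-size flag (default 24) that caps how many entries --list and --replays print, since the tool only ever loads a single page of results. Below is a minimal, self-contained sketch of the option, assuming clap's derive API as used in main.rs; the struct and field shown are illustrative, not the project's actual Args.

// Minimal sketch (not the project's actual Args struct), assuming clap's
// derive API roughly as used in main.rs; only the new field is shown.
use clap::Parser;

#[derive(Parser)]
struct PageArgs {
    /// How many streams/replays to request in the single page this tool loads.
    #[clap(long = "page-size", default_value = "24")]
    page_size: u8, // u8 bounds the value to 0-255; clap rejects anything larger
}

fn main() {
    // e.g. running this sketch with `--page-size 50` prints 50;
    // with no flag it prints the default, 24.
    let args = PageArgs::parse();
    println!("page size: {}", args.page_size);
}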
2 changes: 1 addition & 1 deletion Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "cbc-sl"
-version = "0.6.0"
+version = "0.6.1"
 edition = "2021"
 authors = ["Malloc Voidstar <[email protected]>"]
 license = "Apache-2.0"
21 changes: 13 additions & 8 deletions src/main.rs

@@ -37,12 +37,16 @@ struct Args {
     /// User-Agent or it will reject your request
     #[clap(short = 'n', long = "no-run", conflicts_with_all(&["list", "replays"]))]
     no_run: bool,
-    /// List available Olympics streams
+    /// List available Olympics streams (at most page-size are shown)
     #[clap(short = 'l', long = "list", conflicts_with_all(&["url", "replays"]))]
     list: bool,
-    /// List available Olympics replays (at most 24 are shown)
+    /// List available Olympics replays (at most page-size are shown)
     #[clap(short = 'a', long = "replays", conflicts_with_all(&["url", "list"]))]
     replays: bool,
+    /// Size of a "page" of streams to load. Since this tool only loads one page, this means
+    /// how many streams/replays to show for --list and --replays
+    #[clap(long = "page-size", default_value = "24")]
+    page_size: u8,
     /// Streamlink log level
     #[clap(long = "loglevel", value_parser(["none", "error", "warning", "info", "debug", "trace"]), default_value = "info")]
     loglevel: String,
@@ -64,7 +68,7 @@ struct Args {
     url: Option<String>,
 }

-fn get_live_and_upcoming(agent: &Agent) -> Result<api::GqlResponse> {
+fn get_live_and_upcoming(agent: &Agent, page_size: u8) -> Result<api::GqlResponse> {
     const LIVE_QUERY: &str =
         "query contentItemsByItemsQueryFilters($itemsQueryFilters:ItemsQueryFilters\
         ,$page:Int,$pageSize:Int,$minPubDate:String,$maxPubDate:String,$lineupOnly:Boolean,$offset:Int)\
@@ -87,7 +91,7 @@ fn get_live_and_upcoming(agent: &Agent) -> Result<api::GqlResponse> {
         "variables": {
             "lineupOnly": false,
             "page": 1,
-            "pageSize": 15,
+            "pageSize": page_size,
             "maxPubDate": "now+35d",
             "minPubDate": "now-14h",
             "itemsQueryFilters": {
@@ -106,7 +110,7 @@ fn get_live_and_upcoming(agent: &Agent) -> Result<api::GqlResponse> {
     Ok(agent.post("https://www.cbc.ca/graphql").send_json(query)?.into_json()?)
 }

-fn get_replays(agent: &Agent) -> Result<api::GqlResponse> {
+fn get_replays(agent: &Agent, page_size: u8) -> Result<api::GqlResponse> {
     const VOD_QUERY: &str = "query contentItemsByItemsQueryFilters($itemsQueryFilters:\
         ItemsQueryFilters,$page:Int,$pageSize:Int,$minPubDate:String,$maxPubDate:String,\
         $lineupOnly:Boolean,$offset:Int){allContentItems(itemsQueryFilters:$itemsQueryFilters,\
@@ -129,7 +133,7 @@ fn get_replays(agent: &Agent) -> Result<api::GqlResponse> {
         "variables": {
             "lineupOnly": false,
             "page": 1,
-            "pageSize": 16,
+            "pageSize": page_size,
             "itemsQueryFilters": {
                 "types": [
                     "video"
@@ -153,14 +157,15 @@ fn main() -> Result<()> {
         ab = ab.proxy(Proxy::new(proxy_url_ureq(proxy))?);
     }
     let agent = ab.build();
+    let psz = args.page_size;
     if args.list {
-        for item in get_live_and_upcoming(&agent)?.data.all_content_items.nodes {
+        for item in get_live_and_upcoming(&agent, psz)?.data.all_content_items.nodes {
             println!("{}", item.to_human(args.full_urls)?);
         }
         return Ok(());
     }
     if args.replays {
-        for item in get_replays(&agent)?.data.all_content_items.nodes {
+        for item in get_replays(&agent, psz)?.data.all_content_items.nodes {
             println!("{}", item.to_human(args.full_urls)?);
         }
         return Ok(());
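For readers who want the whole flow in one place, here is a condensed sketch of how the value travels from the CLI into the GraphQL request. The endpoint, the "page"/"pageSize" variable names, and the send_json/into_json calls come from the diff above; the anyhow-style Result, the trimmed query body, and the fetch_page name are simplifying assumptions rather than the project's exact code.

// Condensed sketch, not the project's exact code: the real functions build a
// much larger GraphQL query and deserialize into api::GqlResponse.
use anyhow::Result;
use serde_json::json;

fn fetch_page(agent: &ureq::Agent, query: &str, page_size: u8) -> Result<serde_json::Value> {
    let body = json!({
        "query": query,
        "variables": {
            // Only the first page is ever requested, so pageSize alone
            // decides how many items come back for --list / --replays.
            "page": 1,
            "pageSize": page_size
        }
    });
    Ok(agent
        .post("https://www.cbc.ca/graphql")
        .send_json(body)?
        .into_json()?)
}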
