Mirror of https://github.com/iptv-org/iptv.git (synced 2026-02-01 10:02:41 -05:00)

Merge pull request #27911 from iptv-org/patch-2025.10.1 ("Patch 2025.10.1")
.github/CODE_OF_CONDUCT.md (vendored, 26 changed lines)
@@ -1,13 +1,13 @@
# Contributor Code of Conduct

As contributors and maintainers of this project, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion.

Examples of unacceptable behavior by participants include the use of sexual language or imagery, derogatory comments or personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed from the project team.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.

This Code of Conduct is adapted from the [Contributor Covenant](http:contributor-covenant.org), version 1.0.0, available at https://www.contributor-covenant.org/version/1/0/0/code-of-conduct.html
.github/FUNDING.yml (vendored, 24 changed lines)
@@ -1,12 +1,12 @@
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: iptv-org
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
.github/ISSUE_TEMPLATE/1_streams_add.yml (vendored, 164 changed lines)
@@ -1,82 +1,82 @@
|
||||
name: ➕ Add stream
|
||||
description: Request to add a new stream link into the playlist
|
||||
title: 'Add: '
|
||||
labels: ['streams:add']
|
||||
|
||||
body:
|
||||
- type: input
|
||||
id: stream_id
|
||||
attributes:
|
||||
label: Stream ID (required)
|
||||
description: "ID of the stream consisting of `<channel_id>` or `<channel_id>@<feed_id>`. Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). If you can't find the channel you want in the list, please let us know through this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=channels%3Aadd&projects=&template=channels_add.yml&title=Add%3A+) before posting your request."
|
||||
placeholder: 'BBCAmerica.us@East'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: stream_url
|
||||
attributes:
|
||||
label: Stream URL (required)
|
||||
description: Link to be added to the playlist
|
||||
placeholder: 'https://example.com/playlist.m3u8'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: quality
|
||||
attributes:
|
||||
label: Quality
|
||||
description: Maximum video resolution available on the link
|
||||
options:
|
||||
- 2160p
|
||||
- 1280p
|
||||
- 1080p
|
||||
- 1080i
|
||||
- 720p
|
||||
- 576p
|
||||
- 576i
|
||||
- 480p
|
||||
- 480i
|
||||
- 360p
|
||||
|
||||
- type: dropdown
|
||||
id: label
|
||||
attributes:
|
||||
label: Label
|
||||
description: Is there any reason why the broadcast may not work?
|
||||
options:
|
||||
- 'Not 24/7'
|
||||
- 'Geo-blocked'
|
||||
|
||||
- type: input
|
||||
id: http_user_agent
|
||||
attributes:
|
||||
label: HTTP User Agent
|
||||
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'
|
||||
|
||||
- type: input
|
||||
id: http_referrer
|
||||
attributes:
|
||||
label: HTTP Referrer
|
||||
placeholder: 'https://example.com/'
|
||||
|
||||
- type: textarea
|
||||
id: directives
|
||||
attributes:
|
||||
label: Directives
|
||||
description: 'List of directives telling players how to play the stream. Supported `#KODIPROP` and `#VLCOPT`.'
|
||||
placeholder: '#KODIPROP:inputstream=inputstream.adaptive'
|
||||
|
||||
- type: textarea
|
||||
id: notes
|
||||
attributes:
|
||||
label: Notes
|
||||
description: 'Anything else we should know about this broadcast?'
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Contributing Guide
|
||||
description: 'Please read this guide before posting your request'
|
||||
options:
|
||||
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
|
||||
required: true
|
||||
name: ➕ Add stream
|
||||
description: Request to add a new stream link into the playlist
|
||||
title: 'Add: '
|
||||
labels: ['streams:add']
|
||||
|
||||
body:
|
||||
- type: input
|
||||
id: stream_id
|
||||
attributes:
|
||||
label: Stream ID (required)
|
||||
description: "ID of the stream consisting of `<channel_id>` or `<channel_id>@<feed_id>`. Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). If you can't find the channel you want in the list, please let us know through this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=channels%3Aadd&projects=&template=channels_add.yml&title=Add%3A+) before posting your request."
|
||||
placeholder: 'BBCAmerica.us@East'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: stream_url
|
||||
attributes:
|
||||
label: Stream URL (required)
|
||||
description: Link to be added to the playlist
|
||||
placeholder: 'https://example.com/playlist.m3u8'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: quality
|
||||
attributes:
|
||||
label: Quality
|
||||
description: Maximum video resolution available on the link
|
||||
options:
|
||||
- 2160p
|
||||
- 1280p
|
||||
- 1080p
|
||||
- 1080i
|
||||
- 720p
|
||||
- 576p
|
||||
- 576i
|
||||
- 480p
|
||||
- 480i
|
||||
- 360p
|
||||
|
||||
- type: dropdown
|
||||
id: label
|
||||
attributes:
|
||||
label: Label
|
||||
description: Is there any reason why the broadcast may not work?
|
||||
options:
|
||||
- 'Not 24/7'
|
||||
- 'Geo-blocked'
|
||||
|
||||
- type: input
|
||||
id: http_user_agent
|
||||
attributes:
|
||||
label: HTTP User Agent
|
||||
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'
|
||||
|
||||
- type: input
|
||||
id: http_referrer
|
||||
attributes:
|
||||
label: HTTP Referrer
|
||||
placeholder: 'https://example.com/'
|
||||
|
||||
- type: textarea
|
||||
id: directives
|
||||
attributes:
|
||||
label: Directives
|
||||
description: 'List of directives telling players how to play the stream. Supported `#KODIPROP` and `#EXTVLCOPT`.'
|
||||
placeholder: '#KODIPROP:inputstream=inputstream.adaptive'
|
||||
|
||||
- type: textarea
|
||||
id: notes
|
||||
attributes:
|
||||
label: Notes
|
||||
description: 'Anything else we should know about this broadcast?'
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Contributing Guide
|
||||
description: 'Please read this guide before posting your request'
|
||||
options:
|
||||
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
|
||||
required: true
|
||||
|
||||
.github/ISSUE_TEMPLATE/2_streams_edit.yml (vendored, 188 changed lines)
@@ -1,94 +1,94 @@
|
||||
name: ✏️ Edit stream
|
||||
description: Request to edit stream description
|
||||
title: 'Edit: '
|
||||
labels: ['streams:edit']
|
||||
|
||||
body:
|
||||
- type: input
|
||||
id: stream_url
|
||||
attributes:
|
||||
label: Stream URL (required)
|
||||
description: Link to the stream to be updated
|
||||
placeholder: 'https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
What exactly needs to be changed? To delete an existing value without replacement use the `~` symbol.
|
||||
|
||||
- type: input
|
||||
id: new_stream_url
|
||||
attributes:
|
||||
label: New Stream URL
|
||||
description: New link to the stream
|
||||
placeholder: 'https://servilive.com:3126/live/tele2000live.m3u8'
|
||||
|
||||
- type: input
|
||||
id: stream_id
|
||||
attributes:
|
||||
label: Stream ID
|
||||
description: "ID of the stream consisting of `<channel_id>` or `<channel_id>@<feed_id>`. Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). If you can't find the channel you want in the list, please let us know through this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=channels%3Aadd&projects=&template=channels_add.yml&title=Add%3A+) before posting your request."
|
||||
placeholder: 'BBCAmerica.us@East'
|
||||
|
||||
- type: dropdown
|
||||
id: quality
|
||||
attributes:
|
||||
label: Quality
|
||||
description: Maximum video resolution available on the link
|
||||
options:
|
||||
- 2160p
|
||||
- 1280p
|
||||
- 1080p
|
||||
- 1080i
|
||||
- 720p
|
||||
- 576p
|
||||
- 576i
|
||||
- 480p
|
||||
- 480i
|
||||
- 360p
|
||||
- '~'
|
||||
|
||||
- type: dropdown
|
||||
id: label
|
||||
attributes:
|
||||
label: Label
|
||||
description: Is there any reason why the broadcast may not work?
|
||||
options:
|
||||
- 'Not 24/7'
|
||||
- 'Geo-blocked'
|
||||
- '~'
|
||||
|
||||
- type: input
|
||||
id: http_user_agent
|
||||
attributes:
|
||||
label: HTTP User Agent
|
||||
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'
|
||||
|
||||
- type: input
|
||||
id: http_referrer
|
||||
attributes:
|
||||
label: HTTP Referrer
|
||||
placeholder: 'https://example.com/'
|
||||
|
||||
- type: textarea
|
||||
id: directives
|
||||
attributes:
|
||||
label: Directives
|
||||
description: 'List of directives telling players how to play the stream. Supported `#KODIPROP` and `#VLCOPT`.'
|
||||
placeholder: '#KODIPROP:inputstream=inputstream.adaptive'
|
||||
|
||||
- type: textarea
|
||||
id: notes
|
||||
attributes:
|
||||
label: Notes
|
||||
placeholder: 'Anything else we should know?'
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Contributing Guide
|
||||
description: 'Please read this guide before posting your request'
|
||||
options:
|
||||
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
|
||||
required: true
|
||||
name: ✏️ Edit stream
|
||||
description: Request to edit stream description
|
||||
title: 'Edit: '
|
||||
labels: ['streams:edit']
|
||||
|
||||
body:
|
||||
- type: input
|
||||
id: stream_url
|
||||
attributes:
|
||||
label: Stream URL (required)
|
||||
description: Link to the stream to be updated
|
||||
placeholder: 'https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
What exactly needs to be changed? To delete an existing value without replacement use the `~` symbol.
|
||||
|
||||
- type: input
|
||||
id: new_stream_url
|
||||
attributes:
|
||||
label: New Stream URL
|
||||
description: New link to the stream
|
||||
placeholder: 'https://servilive.com:3126/live/tele2000live.m3u8'
|
||||
|
||||
- type: input
|
||||
id: stream_id
|
||||
attributes:
|
||||
label: Stream ID
|
||||
description: "ID of the stream consisting of `<channel_id>` or `<channel_id>@<feed_id>`. Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). If you can't find the channel you want in the list, please let us know through this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=channels%3Aadd&projects=&template=channels_add.yml&title=Add%3A+) before posting your request."
|
||||
placeholder: 'BBCAmerica.us@East'
|
||||
|
||||
- type: dropdown
|
||||
id: quality
|
||||
attributes:
|
||||
label: Quality
|
||||
description: Maximum video resolution available on the link
|
||||
options:
|
||||
- 2160p
|
||||
- 1280p
|
||||
- 1080p
|
||||
- 1080i
|
||||
- 720p
|
||||
- 576p
|
||||
- 576i
|
||||
- 480p
|
||||
- 480i
|
||||
- 360p
|
||||
- '~'
|
||||
|
||||
- type: dropdown
|
||||
id: label
|
||||
attributes:
|
||||
label: Label
|
||||
description: Is there any reason why the broadcast may not work?
|
||||
options:
|
||||
- 'Not 24/7'
|
||||
- 'Geo-blocked'
|
||||
- '~'
|
||||
|
||||
- type: input
|
||||
id: http_user_agent
|
||||
attributes:
|
||||
label: HTTP User Agent
|
||||
placeholder: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 Edge/12.246'
|
||||
|
||||
- type: input
|
||||
id: http_referrer
|
||||
attributes:
|
||||
label: HTTP Referrer
|
||||
placeholder: 'https://example.com/'
|
||||
|
||||
- type: textarea
|
||||
id: directives
|
||||
attributes:
|
||||
label: Directives
|
||||
description: 'List of directives telling players how to play the stream. Supported `#KODIPROP` and `#EXTVLCOPT`.'
|
||||
placeholder: '#KODIPROP:inputstream=inputstream.adaptive'
|
||||
|
||||
- type: textarea
|
||||
id: notes
|
||||
attributes:
|
||||
label: Notes
|
||||
placeholder: 'Anything else we should know?'
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Contributing Guide
|
||||
description: 'Please read this guide before posting your request'
|
||||
options:
|
||||
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
|
||||
required: true
|
||||
|
||||
.github/ISSUE_TEMPLATE/3_streams_report.yml (vendored, 98 changed lines)
@@ -1,49 +1,49 @@
name: 🚧 Report a stream
description: Report a broken or unstable stream
title: 'Report: '
labels: ['streams:remove']

body:
  - type: markdown
    attributes:
      value: |
        Please fill out the form as much as you can so we could efficiently process your request. To suggest a new replacement link, use this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams:add&projects=&template=1_streams_add.yml&title=Add%3A+).

  - type: textarea
    id: stream_url
    attributes:
      label: Stream URL
      description: List all links in question (one per line)
      placeholder: 'https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
    validations:
      required: true

  - type: dropdown
    id: reason
    attributes:
      label: What happened to the stream?
      options:
        - Not loading
        - Constantly interrupts/lagging
        - Stuck at a single frame
        - I see visual artifacts
        - Shows looped video
        - No sound
        - Displays a message asking to renew subscription
        - Other
    validations:
      required: true

  - type: textarea
    id: notes
    attributes:
      label: Notes (optional)
      placeholder: 'Anything else we should know?'

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
.github/ISSUE_TEMPLATE/4_channel-search.yml (vendored, 56 changed lines)
@@ -1,28 +1,28 @@
name: 🔍 Channel search
description: Ask for help in finding a link to the channel stream.
title: 'Find: '
labels: ['channel search']

body:
  - type: input
    id: stream_id
    attributes:
      label: Channel ID (required)
      description: Unique channel ID from [iptv-org.github.io](https://iptv-org.github.io/). If you can't find the channel you want in the list, please let us know through this [form](https://github.com/iptv-org/database/issues/new?assignees=&labels=channels%3Aadd&projects=&template=channels_add.yml&title=Add%3A+) before posting your request.
      placeholder: 'BBCAmericaEast.us'
    validations:
      required: true

  - type: textarea
    id: notes
    attributes:
      label: Notes
      description: 'Any additional information that may help find a link to the stream faster?'

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
name: 🔍 Channel search
description: Ask for help in finding a link to the channel stream.
title: 'Find: '
labels: ['channel search']

body:
  - type: input
    id: stream_id
    attributes:
      label: Stream ID (required)
      description: Unique ID of the channel and feed from [iptv-org.github.io](https://iptv-org.github.io/). If you cannot find the channel or feed you are looking for in the list, please let us know via one of the [forms](https://github.com/iptv-org/database/issues/new/choose) before posting your request.
      placeholder: 'BBCAmerica.us@East'
    validations:
      required: true

  - type: textarea
    id: notes
    attributes:
      label: Notes
      description: 'Any additional information that may help find a link to the stream faster?'

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
.github/ISSUE_TEMPLATE/5_bug-report.yml (vendored, 36 changed lines)
@@ -1,19 +1,19 @@
name: 🐞 Bug report
description: Report an error in this repository
labels: ['bug']

body:
  - type: textarea
    attributes:
      label: Describe your issue
      description: Please describe the error in as much detail as possible so that we can fix it quickly.
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
name: 🐞 Bug report
description: Report an error in this repository
labels: ['bug']

body:
  - type: textarea
    attributes:
      label: Describe your issue
      description: Please describe the error in as much detail as possible so that we can fix it quickly.
    validations:
      required: true

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
.github/ISSUE_TEMPLATE/6_copyright-claim.yml (vendored, 100 changed lines)
@@ -1,50 +1,50 @@
name: ©️ Copyright removal request
description: Request to remove content
labels: ['removal request']
body:
  - type: input
    attributes:
      label: Your full legal name
    validations:
      required: true

  - type: dropdown
    attributes:
      label: Are you the copyright holder or authorized to act on the copyright owner's behalf?
      description: We cannot process your request unless it is submitted by the copyright owner or an agent authorized to act on behalf of the copyright owner.
      options:
        - Yes, I am the copyright holder.
        - Yes, I am authorized to act on the copyright owner's behalf.
        - No.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Please describe the nature of your copyright ownership or authorization to act on the owner's behalf.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Please provide a detailed description of the original copyrighted work that has allegedly been infringed. If possible, include a URL to where it is posted online.
    validations:
      required: true

  - type: textarea
    attributes:
      label: What content should be removed? Please specify the URL for each item or, if it is an entire file, the file's URL.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Any additional information we need to know?

  - type: checkboxes
    attributes:
      label: Contributing Guide
      description: 'Please read this guide before posting your request'
      options:
        - label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)
          required: true
.github/ISSUE_TEMPLATE/config.yml (vendored, 22 changed lines)
@@ -1,11 +1,11 @@
blank_issues_enabled: false
contact_links:
  - name: 💡 Ideas
    url: https://github.com/orgs/iptv-org/discussions/categories/ideas
    about: Share ideas for new features
  - name: 🙌 Show and tell
    url: https://github.com/orgs/iptv-org/discussions/categories/show-and-tell
    about: Show off something you've made
  - name: ❓ Q&A
    url: https://github.com/orgs/iptv-org/discussions/categories/q-a
    about: Ask the community for help
.github/workflows/check.yml (vendored, 77 changed lines)
@@ -1,37 +1,42 @@
name: check
on:
  workflow_dispatch:
  pull_request:
    types: [opened, synchronize, reopened]
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: changed files
        id: files
        run: |
          git fetch origin master:master
          ANY_CHANGED=false
          ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
          if [ -n "${ALL_CHANGED_FILES}" ]; then
            ANY_CHANGED=true
          fi
          echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
          echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
      - uses: actions/setup-node@v4
        if: ${{ !env.ACT && steps.files.outputs.any_changed == 'true' }}
        with:
          node-version: 22
          cache: 'npm'
      - name: install dependencies
        if: steps.files.outputs.any_changed == 'true'
        run: npm install
      - name: validate
        if: steps.files.outputs.any_changed == 'true'
        run: |
          npm run playlist:lint -- ${{ steps.files.outputs.all_changed_files }}
name: check
on:
  workflow_dispatch:
  pull_request:
    types: [opened, synchronize, reopened]
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Get list of changed files
        id: files
        run: |
          git fetch origin master:master
          ANY_CHANGED=false
          ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
          if [ -n "${ALL_CHANGED_FILES}" ]; then
            ANY_CHANGED=true
          fi
          echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
          echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
      - uses: actions/setup-node@v4
        if: steps.files.outputs.any_changed == 'true'
        with:
          node-version: 22
          cache: 'npm'
      - name: Setup .npmrc for GitHub Packages
        run: |
          echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> .npmrc
          echo "@iptv-org:registry=https://npm.pkg.github.com/" >> .npmrc
          echo "always-auth=true" >> .npmrc
      - name: Install dependencies
        if: steps.files.outputs.any_changed == 'true'
        run: npm install
      - name: Validate changed files
        if: steps.files.outputs.any_changed == 'true'
        run: |
          npm run playlist:lint -- ${{ steps.files.outputs.all_changed_files }}
          npm run playlist:validate -- ${{ steps.files.outputs.all_changed_files }}
.github/workflows/format.yml (vendored, 193 changed lines)
@@ -1,131 +1,62 @@
|
||||
name: format
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# pull_request:
|
||||
# types: [closed]
|
||||
# branches:
|
||||
# - master
|
||||
# schedule:
|
||||
# - cron: "0 12 * * *"
|
||||
jobs:
|
||||
on_trigger:
|
||||
# if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' }}
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: tibdex/github-app-token@v1.8.2
|
||||
if: ${{ !env.ACT }}
|
||||
id: create-app-token
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v4
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'npm'
|
||||
- name: setup git
|
||||
run: |
|
||||
git config user.name "iptv-bot[bot]"
|
||||
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
|
||||
- name: install dependencies
|
||||
run: npm install
|
||||
- name: format internal playlists
|
||||
run: npm run playlist:format
|
||||
- name: check internal playlists
|
||||
run: |
|
||||
npm run playlist:lint
|
||||
npm run playlist:validate
|
||||
- name: changed files
|
||||
id: files_after
|
||||
run: |
|
||||
ANY_CHANGED=false
|
||||
ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
|
||||
if [ -n "${ALL_CHANGED_FILES}" ]; then
|
||||
ANY_CHANGED=true
|
||||
fi
|
||||
echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
|
||||
echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
|
||||
- name: git status
|
||||
run: git status
|
||||
- name: commit changes
|
||||
if: steps.files_after.outputs.any_changed == 'true'
|
||||
run: |
|
||||
git add streams
|
||||
git status
|
||||
git commit -m "[Bot] Format /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [format](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
|
||||
- name: push all changes to the repository
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' && steps.files_after.outputs.any_changed == 'true' }}
|
||||
run: git push
|
||||
on_merge:
|
||||
if: github.event.pull_request.merged == true
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: tibdex/github-app-token@v1.8.2
|
||||
if: ${{ !env.ACT }}
|
||||
id: create-app-token
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v4
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
- name: changed files
|
||||
id: files
|
||||
run: |
|
||||
ANY_CHANGED=false
|
||||
ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
|
||||
if [ -n "${ALL_CHANGED_FILES}" ]; then
|
||||
ANY_CHANGED=true
|
||||
fi
|
||||
echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
|
||||
echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
|
||||
- uses: actions/setup-node@v4
|
||||
if: ${{ steps.files.outputs.any_changed == 'true' }}
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'npm'
|
||||
- name: setup git
|
||||
if: steps.files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
git config user.name "iptv-bot[bot]"
|
||||
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
|
||||
- name: install dependencies
|
||||
if: steps.files.outputs.any_changed == 'true'
|
||||
run: npm install
|
||||
- name: format internal playlists
|
||||
if: steps.files.outputs.any_changed == 'true'
|
||||
run: npm run playlist:format -- ${{ steps.files.outputs.all_changed_files }}
|
||||
- name: check internal playlists
|
||||
if: steps.files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
npm run playlist:lint -- ${{ steps.files.outputs.all_changed_files }}
|
||||
npm run playlist:validate -- ${{ steps.files.outputs.all_changed_files }}
|
||||
- name: git status
|
||||
if: steps.files.outputs.any_changed == 'true'
|
||||
run: git status
|
||||
- name: changed files
|
||||
id: files_after
|
||||
run: |
|
||||
ANY_CHANGED=false
|
||||
ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
|
||||
if [ -n "${ALL_CHANGED_FILES}" ]; then
|
||||
ANY_CHANGED=true
|
||||
fi
|
||||
echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
|
||||
echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
|
||||
- name: commit changes
|
||||
if: steps.files_after.outputs.any_changed == 'true'
|
||||
run: |
|
||||
git add streams
|
||||
git status
|
||||
git commit -m "[Bot] Format /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [format](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
|
||||
- name: push all changes to the repository
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' && steps.files_after.outputs.any_changed == 'true' }}
|
||||
run: git push
|
||||
name: format
|
||||
on:
|
||||
workflow_dispatch:
|
||||
# schedule:
|
||||
# - cron: "0 12 * * *"
|
||||
jobs:
|
||||
on_trigger:
|
||||
# if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' }}
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: tibdex/github-app-token@v1.8.2
|
||||
if: ${{ !env.ACT }}
|
||||
id: create-app-token
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v4
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'npm'
|
||||
- name: Setup .npmrc for GitHub Packages
|
||||
run: |
|
||||
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> .npmrc
|
||||
echo "@iptv-org:registry=https://npm.pkg.github.com/" >> .npmrc
|
||||
echo "always-auth=true" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Format internal playlists
|
||||
run: npm run playlist:format
|
||||
- name: Check internal playlists
|
||||
run: |
|
||||
npm run playlist:lint
|
||||
npm run playlist:validate
|
||||
- name: Get list of changed files
|
||||
id: files_after
|
||||
run: |
|
||||
ANY_CHANGED=false
|
||||
ALL_CHANGED_FILES=$(git diff --name-only master -- streams/ | tr '\n' ' ')
|
||||
if [ -n "${ALL_CHANGED_FILES}" ]; then
|
||||
ANY_CHANGED=true
|
||||
fi
|
||||
echo "all_changed_files=$ALL_CHANGED_FILES" >> "$GITHUB_OUTPUT"
|
||||
echo "any_changed=$ANY_CHANGED" >> "$GITHUB_OUTPUT"
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config user.name "iptv-bot[bot]"
|
||||
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
|
||||
- name: Commit changes to /streams
|
||||
if: steps.files_after.outputs.any_changed == 'true'
|
||||
run: |
|
||||
git add streams
|
||||
git status
|
||||
git commit -m "[Bot] Format /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [format](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
|
||||
- name: Push all changes to the repository
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' && steps.files_after.outputs.any_changed == 'true' }}
|
||||
run: git push
|
||||
.github/workflows/stale.yml (vendored, 49 changed lines)
@@ -1,25 +1,26 @@
name: stale
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'
permissions:
  issues: write
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: tibdex/github-app-token@v1.8.2
        id: create-app-token
        with:
          app_id: ${{ secrets.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}
      - uses: actions/stale@v9
        with:
          repo-token: ${{ steps.create-app-token.outputs.token }}
          days-before-stale: 180
          days-before-close: 7
          operations-per-run: 500
          stale-issue-label: 'stale'
          any-of-issue-labels: 'channel search'
name: stale
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'
permissions:
  actions: write
  issues: write
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: tibdex/github-app-token@v1.8.2
        id: create-app-token
        with:
          app_id: ${{ secrets.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}
      - uses: actions/stale@v9
        with:
          repo-token: ${{ steps.create-app-token.outputs.token }}
          days-before-stale: 180
          days-before-close: 7
          operations-per-run: 500
          stale-issue-label: 'stale'
          any-of-issue-labels: 'channel search'
          close-issue-message: 'This request has been closed because it has been inactive for more than 180 days.'
.github/workflows/update.yml (vendored, 167 changed lines)
@@ -1,82 +1,85 @@
|
||||
name: update
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
jobs:
|
||||
main:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: tibdex/github-app-token@v1.8.2
|
||||
if: ${{ !env.ACT }}
|
||||
id: create-app-token
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v3
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
- name: setup git
|
||||
run: |
|
||||
git config user.name "iptv-bot[bot]"
|
||||
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
|
||||
- uses: actions/setup-node@v3
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
- name: install dependencies
|
||||
run: npm install
|
||||
- name: update internal playlists
|
||||
run: npm run playlist:update --silent >> $GITHUB_OUTPUT
|
||||
id: playlist-update
|
||||
- name: check internal playlists
|
||||
run: |
|
||||
npm run playlist:lint
|
||||
npm run playlist:validate
|
||||
- name: generate public playlists
|
||||
run: npm run playlist:generate
|
||||
- name: generate .api/streams.json
|
||||
run: npm run api:generate
|
||||
- name: update readme
|
||||
run: npm run readme:update
|
||||
- run: git status
|
||||
- name: commit changes to /streams
|
||||
run: |
|
||||
git add streams
|
||||
git status
|
||||
git commit --allow-empty -m "[Bot] Update /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." -m "${{ steps.playlist-update.outputs.OUTPUT }}" --no-verify
|
||||
- name: commit changes to playlists.md
|
||||
run: |
|
||||
git add PLAYLISTS.md
|
||||
git status
|
||||
git commit --allow-empty -m "[Bot] Update PLAYLISTS.md" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
|
||||
- name: push all changes to the repository
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
run: git push
|
||||
- name: deploy public playlists to github pages
|
||||
uses: JamesIves/github-pages-deploy-action@4.1.1
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
with:
|
||||
repository-name: iptv-org/iptv
|
||||
branch: gh-pages
|
||||
folder: .gh-pages
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
git-config-name: iptv-bot[bot]
|
||||
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
|
||||
commit-message: '[Bot] Deploy to GitHub Pages'
|
||||
clean: true
|
||||
- name: move .api/streams.json to iptv-org/api
|
||||
uses: JamesIves/github-pages-deploy-action@4.1.1
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
with:
|
||||
repository-name: iptv-org/api
|
||||
branch: gh-pages
|
||||
folder: .api
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
git-config-name: iptv-bot[bot]
|
||||
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
|
||||
commit-message: '[Bot] Deploy to iptv-org/api'
|
||||
clean: false
|
||||
name: update
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
jobs:
|
||||
main:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: tibdex/github-app-token@v1.8.2
|
||||
if: ${{ !env.ACT }}
|
||||
id: create-app-token
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.APP_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@v4
|
||||
if: ${{ !env.ACT }}
|
||||
with:
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: 'npm'
|
||||
- name: Setup .npmrc for GitHub Packages
|
||||
run: |
|
||||
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> .npmrc
|
||||
echo "@iptv-org:registry=https://npm.pkg.github.com/" >> .npmrc
|
||||
echo "always-auth=true" >> .npmrc
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Update internal playlists
|
||||
run: npm run playlist:update --silent >> $GITHUB_OUTPUT
|
||||
id: playlist-update
|
||||
- name: Check internal playlists
|
||||
run: |
|
||||
npm run playlist:lint
|
||||
npm run playlist:validate
|
||||
- name: Generate public playlists
|
||||
run: npm run playlist:generate
|
||||
- name: Generate .api/streams.json
|
||||
run: npm run api:generate
|
||||
- name: Update readme
|
||||
run: npm run readme:update
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config user.name "iptv-bot[bot]"
|
||||
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
|
||||
- name: Commit changes to /streams
|
||||
run: |
|
||||
git add streams
|
||||
git status
|
||||
git commit --allow-empty -m "[Bot] Update /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." -m "${{ steps.playlist-update.outputs.OUTPUT }}" --no-verify
|
||||
- name: Commit changes to PLAYLIST.md
|
||||
run: |
|
||||
git add PLAYLISTS.md
|
||||
git status
|
||||
git commit --allow-empty -m "[Bot] Update PLAYLISTS.md" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." --no-verify
|
||||
- name: Push all changes to the repository
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
run: git push
|
||||
- name: Deploy public playlists to GitHub Pages
|
||||
uses: JamesIves/github-pages-deploy-action@4.1.1
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
with:
|
||||
repository-name: iptv-org/iptv
|
||||
branch: gh-pages
|
||||
folder: .gh-pages
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
git-config-name: iptv-bot[bot]
|
||||
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
|
||||
commit-message: '[Bot] Deploy to GitHub Pages'
|
||||
clean: true
|
||||
- name: Move .api/streams.json to iptv-org/api
|
||||
uses: JamesIves/github-pages-deploy-action@4.1.1
|
||||
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
|
||||
with:
|
||||
repository-name: iptv-org/api
|
||||
branch: gh-pages
|
||||
folder: .api
|
||||
token: ${{ steps.create-app-token.outputs.token }}
|
||||
git-config-name: iptv-bot[bot]
|
||||
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
|
||||
commit-message: '[Bot] Deploy to iptv-org/api'
|
||||
clean: false
|
||||
|
||||
.gitignore (vendored, 16 changed lines)
@@ -1,9 +1,9 @@
node_modules
.artifacts
.secrets
.actrc
.DS_Store
/.gh-pages/
/.api/
.env
node_modules
.artifacts
.secrets
.actrc
.DS_Store
/.gh-pages/
/.api/
.env
/temp/
.readme/.gitignore (vendored, 6 changed lines)
@@ -1,4 +1,4 @@
_categories.md
_countries.md
_languages.md
_categories.md
_countries.md
_languages.md
_regions.md
@@ -1,88 +1,88 @@
## Playlists

There are several versions of playlists that differ in the way they are grouped. As of January 30th, 2024, we have stopped distributing NSFW channels. For more information, please look at [this issue](https://github.com/iptv-org/iptv/issues/15723).

### Grouped by category

Playlists in which channels are grouped by category.

<details>
<summary>Expand</summary>
<br>

```
https://iptv-org.github.io/iptv/index.category.m3u
```

Same thing, but split up into separate files:

<!-- prettier-ignore -->
#include "./.readme/_categories.md"

</details>

### Grouped by language

Playlists in which channels are grouped by the language in which they are broadcast.

<details>
<summary>Expand</summary>
<br>

```
https://iptv-org.github.io/iptv/index.language.m3u
```

Same thing, but split up into separate files:

<!-- prettier-ignore -->
#include "./.readme/_languages.md"

</details>

### Grouped by broadcast area

Playlists in which channels are grouped by broadcast area.

<details>
<summary>Expand</summary>

#### Countries

```
https://iptv-org.github.io/iptv/index.country.m3u
```

Same thing, but split up into separate files:

<!-- prettier-ignore -->
#include "./.readme/_countries.md"

#### Regions

<!-- prettier-ignore -->
#include "./.readme/_regions.md"

</details>

### Grouped by sources

Playlists in which channels are grouped by broadcast source.

<details>
<summary>Expand</summary>
<br>

To use the playlist, simply replace `<FILENAME>` in the link below with the name of one of the files in the [streams](streams) folder.

```
https://iptv-org.github.io/iptv/sources/<FILENAME>.m3u
```

</details>

Also, any of our internal playlists are available in raw form (without any filtering or sorting) at this link:

```
https://iptv-org.github.io/iptv/raw/<FILENAME>.m3u
```
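
For illustration only, assuming `fr.m3u` is one of the files in the [streams](streams) folder (it is used as an example elsewhere in this repository), the substituted links could be used like this:

```sh
# Play the grouped-by-source playlist built from streams/fr.m3u
vlc https://iptv-org.github.io/iptv/sources/fr.m3u

# Or download the raw, unfiltered version of the same internal playlist
curl -O https://iptv-org.github.io/iptv/raw/fr.m3u
```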
CONTRIBUTING.md (430 changed lines)
@@ -1,215 +1,215 @@
|
||||
# Contributing Guide
|
||||
|
||||
- [How to?](#how-to)
|
||||
- [Stream Description Scheme](#stream-description-scheme)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Scripts](#scripts)
|
||||
- [Workflows](#workflows)
|
||||
|
||||
## How to?
|
||||
|
||||
### How to add a new stream link to a playlist?
|
||||
|
||||
You have several options:
|
||||
|
||||
1. Create a new request using this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams:add&projects=&template=1_streams_add.yml&title=Add%3A+) and if approved, the link will automatically be added to the playlist on the next update.
|
||||
|
||||
2. Add the link to the playlist directly using a [pull request](https://github.com/iptv-org/iptv/pulls).
|
||||
|
||||
Regardless of which option you choose, before posting your request please do the following:
|
||||
|
||||
- Make sure the link you want to add works stably. To check this, open it in one of the players (for example, [VLC player](https://www.videolan.org/vlc/index.html)) and watch the broadcast for at least a minute (some test streams are interrupted after 15-30 seconds).
|
||||
- Make sure the link is not already in the playlist. This can be done by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
|
||||
- Find the ID of the channel you want on [iptv-org.github.io](https://iptv-org.github.io/). If your desired channel is not on the list you can leave a request to add it [here](https://github.com/iptv-org/database/issues/new/choose).
|
||||
- Make sure the channel is not blocklisted. It can also be done through [iptv-org.github.io](https://iptv-org.github.io/).
|
||||
- The link does not lead to the Xtream Codes server. [Why don't you accept links to Xtream Codes server?](FAQ.md#why-dont-you-accept-links-to-xtream-codes-server)
|
||||
- If you know that the broadcast only works in certain countries or it is periodically interrupted, do not forget to indicate this in the request.
|
||||
|
||||
A request without a valid stream ID or a working link to the stream will be closed immediately.
|
||||
|
||||
Note all links in playlists are sorted automatically by scripts so there is no need to sort them manually. For more info, see [Scripts](#scripts).
|
||||
|
||||
### How to fix the stream description?
|
||||
|
||||
Most of the stream description (channel name, feed name, categories, languages, broadcast area, logo) we load from the [iptv-org/database](https://github.com/iptv-org/database) using the stream ID.
|
||||
|
||||
So first of all, make sure that the desired stream has the correct ID. A full list of all supported channels and their corresponding IDs can be found on [iptv-org.github.io](https://iptv-org.github.io/). To change the stream ID of any link in the playlist, just fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams%3Aedit&projects=&template=2_streams_edit.yml&title=Edit%3A+).
|
||||
|
||||
If, however, you have found an error in the database itself, this is the place to go: [How to edit channel description?](https://github.com/iptv-org/database/blob/master/CONTRIBUTING.md#how-to-edit-channel-description)
|
||||
|
||||
### How to distinguish a link to an Xtream Codes server from a regular one?
|
||||
|
||||
Most of them have this form:
|
||||
|
||||
`http(s)://{hostname}:{port}/{username}/{password}/{channelID}` (port is often `25461`)
|
||||
|
||||
To make sure that the link leads to an Xtream Codes server, copy the `hostname`, `port`, `username` and `password` into the link below and try to open it in a browser:
|
||||
|
||||
`http(s)://{hostname}:{port}/panel_api.php?username={username}&password={password}`
|
||||
|
||||
If the link responds, you are dealing with an Xtream Codes server.
|
||||
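The same check can be done from the command line. A minimal sketch using `curl` (the hostname, port, username and password below are placeholders taken from the stream link):

```sh
# Hypothetical values: substitute the parts extracted from the suspicious link
curl -s "http://example.com:25461/panel_api.php?username=USERNAME&password=PASSWORD"
```

If the server returns a response (typically JSON with account and server info), the link points to an Xtream Codes panel.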
|
||||
### How to report a broken stream?
|
||||
|
||||
Fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams:remove&projects=&template=3_streams_report.yml&title=Broken%3A+) and as soon as a working replacement appears, we will add it to the playlist or at least remove the non-working one.
|
||||
|
||||
Before publishing your report, just make sure that:
|
||||
|
||||
- The link is still in our playlists. You can verify this by [searching](https://github.com/search?q=repo%3Aiptv-org%2Fiptv+http%3A%2F%2Fexample.com&type=code) the repository.
|
||||
- The link really doesn't work and is not just [geo-blocked](https://en.wikipedia.org/wiki/Geo-blocking). To check this, you can either use a [VPN](https://en.wikipedia.org/wiki/Virtual_private_network) or services such as [streamtest.in](https://streamtest.in/).
|
||||
|
||||
An issue without a valid link will be closed immediately.
|
||||
|
||||
### How to find a broken stream?
|
||||
|
||||
For starters, you can just try to open the playlist in [VLC player](https://www.videolan.org/vlc/). The player outputs all errors to the log (Tools -> Messages) so you'll be able to determine pretty accurately why a link isn't working.
|
||||
|
||||
Another way to test links is to use the NPM script. To do this, first make sure you have [Node.js](https://nodejs.org/en) installed on your system. Then go to the `iptv` folder using [Console](https://en.wikipedia.org/wiki/Windows_Console) (or [Terminal](<https://en.wikipedia.org/wiki/Terminal_(macOS)>) if you have macOS) and run the command:
|
||||
|
||||
```sh
|
||||
npm run playlist:test path/to/playlist.m3u
|
||||
```
|
||||
|
||||
This command will run an automatic check of all links in the playlist and display their status:
|
||||
|
||||
```sh
|
||||
npm run playlist:test streams/fr.m3u
|
||||
|
||||
streams/fr.m3u
|
||||
┌─────┬───────────────────────────┬──────────────────────────────────────────────────────────────────────────────────────────────────────┬───────────────────────────┐
|
||||
│ │ tvg-id │ url │ status │
|
||||
├─────┼───────────────────────────┼──────────────────────────────────────────────────────────────────────────────────────────────────────┼───────────────────────────┤
|
||||
│ 0 │ 6ter.fr │ https://origin-caf900c010ea8046.live.6cloud.fr/out/v1/29c7a579af3348b48230f76cd75699a5/dash_short... │ LOADING... │
|
||||
│ 1 │ 20MinutesTV.fr │ https://lives.digiteka.com/stream/86d3e867-a272-496b-8412-f59aa0104771/index.m3u8 │ FFMPEG_STREAMS_NOT_FOUND │
|
||||
│ 2 │ │ https://video1.getstreamhosting.com:1936/8420/8420/playlist.m3u8 │ OK │
|
||||
│ 3 │ ADNTVPlus.fr │ https://samsunguk-adn-samsung-fre-qfrlc.amagi.tv/playlist/samsunguk-adn-samsung-fre/playlist.m3u8 │ HTTP_FORBIDDEN │
|
||||
│ 4 │ Africa24.fr │ https://edge12.vedge.infomaniak.com/livecast/ik:africa24/manifest.m3u8 │ OK │
|
||||
│ 5 │ Africa24English.fr │ https://edge17.vedge.infomaniak.com/livecast/ik:africa24sport/manifest.m3u8 │ OK │
|
||||
│ 6 │ AfricanewsEnglish.fr │ https://37c774660687468c821a51190046facf.mediatailor.us-east-1.amazonaws.com/v1/master/04fd913bb2... │ HTTP_GATEWAY_TIMEOUT │
|
||||
│ 7 │ AlpedHuezTV.fr │ https://edge.vedge.infomaniak.com/livecast/ik:adhtv/chunklist.m3u8 │ HTTP_NOT_FOUND │
|
||||
```
|
||||
|
||||
After that, all you have to do is report any broken streams you find.
|
||||
|
||||
### How to replace a broken stream?
|
||||
|
||||
This can be done by filling out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=streams%3Aedit&projects=&template=2_streams_edit.yml&title=Edit%3A+).
|
||||
|
||||
Alternatively, you can directly update the files in the [/streams](/streams) folder and then create a [pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests).
|
||||
|
||||
### How to remove my channel from the playlist?
|
||||
|
||||
To request removal of a link to a channel from the repository, you need to fill out this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=removal+request&projects=&template=6_copyright-claim.yml&title=Remove%3A+) and wait for the request to be reviewed (this usually takes no more than 1 business day). If the request is approved, links to the channel will be removed from the repository immediately.
|
||||
|
||||
The channel will also be added to our [blocklist](https://github.com/iptv-org/database/blob/master/data/blocklist.csv) to avoid its appearance in our playlists in the future.
|
||||
|
||||
Please note that we only accept removal requests from channel owners and their official representatives; all other requests will be closed immediately.
|
||||
|
||||
## Stream Description Scheme
|
||||
|
||||
For a stream to be approved, its description must follow this template:
|
||||
|
||||
```
|
||||
#EXTINF:-1 tvg-id="STREAM_ID",STREAM_TITLE (QUALITY) [LABEL]
|
||||
STREAM_URL
|
||||
```
|
||||
|
||||
| Attribute | Description | Required | Valid values |
|
||||
| -------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- | -------------------------------------------- |
|
||||
| `STREAM_ID` | Stream ID consisting of channel ID and feed ID. Full list of supported channels with corresponding ID could be found on [iptv-org.github.io](https://iptv-org.github.io/). | Optional | `<channel_id>` or `<channel_id>@<feed_id>` |
|
||||
| `STREAM_TITLE` | Stream title consisting of channel name and feed name. May contain any characters except: `,`, `[`, `]`. | Required | - |
|
||||
| `QUALITY` | Maximum stream quality. | Optional | `2160p`, `1080p`, `720p`, `480p`, `360p` etc |
|
||||
| `LABEL` | Specified in cases where the broadcast for some reason may not be available to some users. | Optional | `Geo-blocked` or `Not 24/7` |
|
||||
| `STREAM_URL` | Stream URL. | Required | - |
|
||||
|
||||
Example:
|
||||
|
||||
```xml
|
||||
#EXTINF:-1 tvg-id="ExampleTV.us@East",Example TV East (720p) [Not 24/7]
|
||||
https://example.com/playlist.m3u8
|
||||
```
|
||||
|
||||
Also, if necessary, you can specify custom [HTTP User-Agent](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent) and [HTTP Referrer](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referer) through additional attributes:
|
||||
|
||||
```xml
|
||||
#EXTINF:-1 tvg-id="ExampleTV.us" http-referrer="http://example.com/" http-user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64)",Example TV
|
||||
http://example.com/stream.m3u8
|
||||
```
|
||||
|
||||
or use player-specific directives:
|
||||
|
||||
_VLC_
|
||||
|
||||
```xml
|
||||
#EXTINF:-1 tvg-id="ExampleTV.us@VLC",Example TV
|
||||
#EXTVLCOPT:http-referrer=http://example.com/
|
||||
#EXTVLCOPT:http-user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64)
|
||||
http://example.com/stream.m3u8
|
||||
```
|
||||
|
||||
_Kodi_
|
||||
|
||||
```xml
|
||||
#EXTINF:-1 tvg-id="ExampleTV.us@Kodi",Example TV
|
||||
#KODIPROP:inputstream=inputstream.adaptive
|
||||
#KODIPROP:inputstream.adaptive.stream_headers=Referer=http://example.com/&User-Agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64)
|
||||
http://example.com/stream.m3u8
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
- `.github/`
|
||||
- `ISSUE_TEMPLATE/`: issue templates for the repository.
|
||||
  - `workflows/`: contains [GitHub Actions](https://docs.github.com/en/actions/quickstart) workflows.
|
||||
- `CODE_OF_CONDUCT.md`: rules you shouldn't break if you don't want to get banned.
|
||||
- `.readme/`
|
||||
- `config.json`: config for the `markdown-include` package, which is used to compile everything into one `PLAYLISTS.md` file.
|
||||
- `preview.png`: image displayed in the `README.md`.
|
||||
- `template.md`: template for `PLAYLISTS.md`.
|
||||
- `scripts/`: contains all scripts used in the repository.
|
||||
- `streams/`: contains all streams broken down by the country from which they are broadcast.
|
||||
- `tests/`: contains tests to check the scripts.
|
||||
- `CONTRIBUTING.md`: file you are currently reading.
|
||||
- `PLAYLISTS.md`: auto-updated list of available playlists.
|
||||
- `README.md`: project description.
|
||||
|
||||
## Scripts
|
||||
|
||||
These scripts are created to automate routine processes in the repository and make it a bit easier to maintain.
|
||||
|
||||
For scripts to work, you must have [Node.js](https://nodejs.org/en) installed on your computer.
|
||||
|
||||
To run scripts use the `npm run <script-name>` command.
|
||||
|
||||
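For example, to reformat the internal playlists and then check them for syntax errors (both scripts are described below):

```sh
npm run playlist:format
npm run playlist:lint
```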
- `act:check`: allows you to run the [check](https://github.com/iptv-org/iptv/blob/master/.github/workflows/check.yml) workflow locally. Depends on [nektos/act](https://github.com/nektos/act).
|
||||
- `act:format`: allows you to test the [format](https://github.com/iptv-org/iptv/blob/master/.github/workflows/format.yml) workflow locally. Depends on [nektos/act](https://github.com/nektos/act).
|
||||
- `act:update`: allows you to test the [update](https://github.com/iptv-org/iptv/blob/master/.github/workflows/update.yml) workflow locally. Depends on [nektos/act](https://github.com/nektos/act).
|
||||
- `api:load`: downloads the latest channel and stream data from the [iptv-org/api](https://github.com/iptv-org/api).
|
||||
- `api:generate`: generates a JSON file with all streams for the [iptv-org/api](https://github.com/iptv-org/api) repository.
|
||||
- `api:deploy`: allows you to manually upload a JSON file created via `api:generate` to the [iptv-org/api](https://github.com/iptv-org/api) repository. To run the script you must provide your [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) with write access to the repository.
|
||||
- `playlist:format`: formats internal playlists. The process includes [URL normalization](https://en.wikipedia.org/wiki/URI_normalization), duplicate removal, removal of invalid IDs, and sorting of links by channel name, quality, and label.
|
||||
- `playlist:update`: triggers an update of internal playlists. The process involves processing approved requests from issues.
|
||||
- `playlist:generate`: generates all public playlists.
|
||||
- `playlist:validate`: checks IDs and links in internal playlists for errors.
|
||||
- `playlist:lint`: checks internal playlists for syntax errors.
|
||||
- `playlist:test`: tests links in internal playlists.
|
||||
- `playlist:edit`: utility for quickly mapping streams to channels.
|
||||
- `playlist:deploy`: allows you to manually publish all playlists generated via `playlist:generate`. To run the script you must provide your [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens) with write access to the repository.
|
||||
- `readme:update`: updates the list of playlists in [README.md](README.md).
|
||||
- `report:create`: creates a report on current issues.
|
||||
- `check`: (shorthand) sequentially runs the `playlist:lint` and `playlist:validate` scripts.
|
||||
- `format`: (shorthand) runs the `playlist:format` script.
|
||||
- `update`: (shorthand) sequentially runs the `playlist:generate`, `api:generate` and `readme:update` scripts.
|
||||
- `deploy`: (shorthand) sequentially runs the `playlist:deploy` and `api:deploy` scripts.
|
||||
- `lint`: checks the scripts for syntax errors.
|
||||
- `test`: runs a test of all the scripts described above.
|
||||
|
||||
## Workflows
|
||||
|
||||
To automate running the scripts described above, we use [GitHub Actions workflows](https://docs.github.com/en/actions/using-workflows).
|
||||
|
||||
Each workflow includes its own set of scripts that can be run either manually or in response to an event.
|
||||
|
||||
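The same workflows can also be exercised locally before pushing changes, via the `act:*` scripts listed above (this assumes [nektos/act](https://github.com/nektos/act) and Docker are installed):

```sh
npm run act:check
```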
- `check`: sequentially runs the `api:load`, `playlist:lint` and `playlist:validate` scripts when a new pull request appears, and blocks the merge if an error is detected.
|
||||
- `format`: sequentially runs `api:load`, `playlist:format`, `playlist:lint` and `playlist:validate` scripts.
|
||||
- `update`: every day at 0:00 UTC sequentially runs `api:load`, `playlist:update`, `playlist:lint`, `playlist:validate`, `playlist:generate`, `api:generate` and `readme:update` scripts and deploys the output files if successful.
|
||||
|
||||
46
FAQ.md
46
FAQ.md
@@ -1,23 +1,23 @@
|
||||
# Frequently Asked Questions
|
||||
|
||||
### My favorite channel is not on the playlist.
|
||||
|
||||
Start by asking our community for help via [Discussions](https://github.com/orgs/iptv-org/discussions). It is quite possible that someone already has a link to the channel you need and they just haven't added it to our playlist yet.
|
||||
|
||||
But keep in mind that not all TV channels are available for viewing online, and in this case there is little we can do about it.
|
||||
|
||||
### Are you planning to include Video On Demand (VOD) in the playlist?
|
||||
|
||||
No.
|
||||
|
||||
### Why is the channel on iptv-org.github.io but not in the playlist?
|
||||
|
||||
The site lists all the TV channels in the world, but only those for which we have working stream links are included in the playlists.
|
||||
|
||||
### Can I add a radio broadcast?
|
||||
|
||||
Yes, if it is a [visual radio](https://en.wikipedia.org/wiki/Visual_radio), in which video and audio are broadcast at the same time.
|
||||
|
||||
### Why don't you accept links to Xtream Codes server?
|
||||
|
||||
Xtream Codes streams tend to be very unstable and links to them often fail quickly, so it is easier for us to exclude them from the playlists up front than to hunt for expired ones every day.
|
||||
|
||||
48
LICENSE
48
LICENSE
@@ -1,24 +1,24 @@
|
||||
This is free and unencumbered software released into the public domain.
|
||||
|
||||
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||
distribute this software, either in source code form or as a compiled
|
||||
binary, for any purpose, commercial or non-commercial, and by any
|
||||
means.
|
||||
|
||||
In jurisdictions that recognize copyright laws, the author or authors
|
||||
of this software dedicate any and all copyright interest in the
|
||||
software to the public domain. We make this dedication for the benefit
|
||||
of the public at large and to the detriment of our heirs and
|
||||
successors. We intend this dedication to be an overt act of
|
||||
relinquishment in perpetuity of all present and future rights to this
|
||||
software under copyright law.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
For more information, please refer to <http://unlicense.org/>
|
||||
|
||||
176
PLAYLISTS.md
176
PLAYLISTS.md
@@ -1,22 +1,22 @@
|
||||
## Playlists
|
||||
|
||||
There are several versions of playlists that differ in the way they are grouped. As of January 30th, 2024, we have stopped distributing NSFW channels. For more information, please look at [this issue](https://github.com/iptv-org/iptv/issues/15723).
|
||||
|
||||
### Grouped by category
|
||||
|
||||
Playlists in which channels are grouped by category.
|
||||
|
||||
<details>
|
||||
<summary>Expand</summary>
|
||||
<br>
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/index.category.m3u
|
||||
```
|
||||
|
||||
Same thing, but split up into separate files:
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
<table>
|
||||
<thead>
|
||||
<tr><th align="left">Category</th><th align="left">Channels</th><th align="left">Playlist</th></tr>
|
||||
@@ -54,25 +54,25 @@ Same thing, but split up into separate files:
|
||||
<tr><td>XXX</td><td align="right">0</td><td nowrap><code>https://iptv-org.github.io/iptv/categories/xxx.m3u</code></td></tr>
|
||||
<tr><td>Undefined</td><td align="right">3696</td><td nowrap><code>https://iptv-org.github.io/iptv/categories/undefined.m3u</code></td></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
</details>
|
||||
|
||||
### Grouped by language
|
||||
|
||||
Playlists in which channels are grouped by the language in which they are broadcast.
|
||||
|
||||
<details>
|
||||
<summary>Expand</summary>
|
||||
<br>
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/index.language.m3u
|
||||
```
|
||||
|
||||
Same thing, but split up into separate files:
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
<table>
|
||||
<thead>
|
||||
<tr><th align="left">Language</th><th align="left">Channels</th><th align="left">Playlist</th></tr>
|
||||
@@ -292,26 +292,26 @@ Same thing, but split up into separate files:
|
||||
<tr><td align="left">Zulu</td><td align="right">1</td><td align="left" nowrap><code>https://iptv-org.github.io/iptv/languages/zul.m3u</code></td></tr>
|
||||
<tr><td align="left">Undefined</td><td align="right">2176</td><td align="left" nowrap><code>https://iptv-org.github.io/iptv/languages/undefined.m3u</code></td></tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
</details>
|
||||
|
||||
### Grouped by broadcast area
|
||||
|
||||
Playlists in which channels are grouped by broadcast area.
|
||||
|
||||
<details>
|
||||
<summary>Expand</summary>
|
||||
|
||||
#### Countries
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/index.country.m3u
|
||||
```
|
||||
|
||||
Same thing, but split up into separate files:
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
- 🇦🇫 Afghanistan <code>https://iptv-org.github.io/iptv/countries/af.m3u</code>
|
||||
- 🇦🇱 Albania <code>https://iptv-org.github.io/iptv/countries/al.m3u</code>
|
||||
- 🇩🇿 Algeria <code>https://iptv-org.github.io/iptv/countries/dz.m3u</code>
|
||||
@@ -1293,11 +1293,11 @@ Same thing, but split up into separate files:
|
||||
- 🇿🇲 Zambia <code>https://iptv-org.github.io/iptv/countries/zm.m3u</code>
|
||||
- 🇿🇼 Zimbabwe <code>https://iptv-org.github.io/iptv/countries/zw.m3u</code>
|
||||
- 🌐 International <code>https://iptv-org.github.io/iptv/countries/int.m3u</code>
|
||||
- Undefined <code>https://iptv-org.github.io/iptv/countries/undefined.m3u</code>
|
||||
|
||||
#### Regions
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
- Africa <code>https://iptv-org.github.io/iptv/regions/afr.m3u</code>
|
||||
- Americas <code>https://iptv-org.github.io/iptv/regions/amer.m3u</code>
|
||||
- Arab world <code>https://iptv-org.github.io/iptv/regions/arab.m3u</code>
|
||||
@@ -1339,28 +1339,28 @@ Same thing, but split up into separate files:
|
||||
- West Africa <code>https://iptv-org.github.io/iptv/regions/waf.m3u</code>
|
||||
- West Asia <code>https://iptv-org.github.io/iptv/regions/was.m3u</code>
|
||||
- Western Europe <code>https://iptv-org.github.io/iptv/regions/wer.m3u</code>
|
||||
- Worldwide <code>https://iptv-org.github.io/iptv/regions/ww.m3u</code>
|
||||
|
||||
</details>
|
||||
|
||||
### Grouped by sources
|
||||
|
||||
Playlists in which channels are grouped by broadcast source.
|
||||
|
||||
<details>
|
||||
<summary>Expand</summary>
|
||||
<br>
|
||||
|
||||
To use the playlist, simply replace `<FILENAME>` in the link below with the name of one of the files in the [streams](streams) folder.
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/sources/<FILENAME>.m3u
|
||||
```
|
||||
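For example, for the `fr.m3u` file from the [streams](streams) folder the link would look like this:

```
https://iptv-org.github.io/iptv/sources/fr.m3u
```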
|
||||
</details>
|
||||
|
||||
Also, any of our internal playlists are available in raw form (without any filtering or sorting) at this link:
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/raw/<FILENAME>.m3u
|
||||
```
|
||||
158
README.md
158
README.md
@@ -1,79 +1,79 @@
|
||||
# IPTV [](https://github.com/iptv-org/iptv/actions/workflows/update.yml)
|
||||
|
||||
Collection of publicly available IPTV (Internet Protocol television) channels from all over the world.
|
||||
|
||||
## Table of contents
|
||||
|
||||
- 🚀 [How to use?](#how-to-use)
|
||||
- 📺 [Playlists](#playlists)
|
||||
- 🗓 [EPG](#epg)
|
||||
- 🗄 [Database](#database)
|
||||
- 👨💻 [API](#api)
|
||||
- 📚 [Resources](#resources)
|
||||
- 💬 [Discussions](#discussions)
|
||||
- ❓ [FAQ](#faq)
|
||||
- 🛠 [Contribution](#contribution)
|
||||
- ⚖ [Legal](#legal)
|
||||
- © [License](#license)
|
||||
|
||||
## How to use?
|
||||
|
||||
Simply paste the link to one of the playlists into [any video player](https://github.com/iptv-org/awesome-iptv#apps) that supports live streaming and press _Open_.
|
||||
|
||||
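For example, if you have [VLC player](https://www.videolan.org/vlc/) installed, the main playlist (see [Playlists](#playlists) below) can also be opened straight from the command line; this is just a sketch, and any player that supports network playlists works the same way:

```sh
vlc https://iptv-org.github.io/iptv/index.m3u
```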

|
||||
|
||||
## Playlists
|
||||
|
||||
The main playlist containing all channels available in the repository can be found at:
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/index.m3u
|
||||
```
|
||||
|
||||
Links to other playlists can be found in the [PLAYLISTS.md](PLAYLISTS.md) file.
|
||||
|
||||
## EPG
|
||||
|
||||
[Electronic Program Guide](https://en.wikipedia.org/wiki/Electronic_program_guide) for most of the channels can be downloaded using utilities published in the [iptv-org/epg](https://github.com/iptv-org/epg) repository.
|
||||
|
||||
## Database
|
||||
|
||||
All channel data is taken from the [iptv-org/database](https://github.com/iptv-org/database) repository. If you find any errors please open a new [issue](https://github.com/iptv-org/database/issues) there.
|
||||
|
||||
## API
|
||||
|
||||
The API documentation can be found in the [iptv-org/api](https://github.com/iptv-org/api) repository.
|
||||
|
||||
## Resources
|
||||
|
||||
Links to other useful IPTV-related resources can be found in the [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv) repository.
|
||||
|
||||
## Discussions
|
||||
|
||||
If you need help finding a channel, have a question or idea, welcome to the [Discussions](https://github.com/orgs/iptv-org/discussions).
|
||||
|
||||
## FAQ
|
||||
|
||||
The answers to the most popular questions can be found in the [FAQ.md](FAQ.md) file.
|
||||
|
||||
## Contribution
|
||||
|
||||
Please make sure to read the [Contributing Guide](CONTRIBUTING.md) before sending an issue or making a pull request.
|
||||
|
||||
And thank you to everyone who has already contributed!
|
||||
|
||||
### Backers
|
||||
|
||||
<a href="https://opencollective.com/iptv-org"><img src="https://opencollective.com/iptv-org/backers.svg?width=890" /></a>
|
||||
|
||||
### Contributors
|
||||
|
||||
<a href="https://github.com/iptv-org/iptv/graphs/contributors"><img src="https://opencollective.com/iptv-org/contributors.svg?width=890" /></a>
|
||||
|
||||
## Legal
|
||||
|
||||
No video files are stored in this repository. The repository simply contains user-submitted links to publicly available video stream URLs, which to the best of our knowledge have been intentionally made publicly available by the copyright holders. If any links in these playlists infringe on your rights as a copyright holder, they may be removed by sending a [pull request](https://github.com/iptv-org/iptv/pulls) or opening an [issue](https://github.com/iptv-org/iptv/issues/new?assignees=freearhey&labels=removal+request&template=--removal-request.yml&title=Remove%3A+). However, note that we have **no control** over the destination of the link, and just removing the link from the playlist will not remove its contents from the web. Note that linking does not directly infringe copyright because no copy is made on the site providing the link, and thus this is **not** a valid reason to send a DMCA notice to GitHub. To remove this content from the web, you should contact the web host that's actually hosting the content (**not** GitHub, nor the maintainers of this repository).
|
||||
|
||||
## License
|
||||
|
||||
[](LICENSE)
|
||||
# IPTV [](https://github.com/iptv-org/iptv/actions/workflows/update.yml)
|
||||
|
||||
Collection of publicly available IPTV (Internet Protocol television) channels from all over the world.
|
||||
|
||||
## Table of contents
|
||||
|
||||
- 🚀 [How to use?](#how-to-use)
|
||||
- 📺 [Playlists](#playlists)
|
||||
- 🗓 [EPG](#epg)
|
||||
- 🗄 [Database](#database)
|
||||
- 👨💻 [API](#api)
|
||||
- 📚 [Resources](#resources)
|
||||
- 💬 [Discussions](#discussions)
|
||||
- ❓ [FAQ](#faq)
|
||||
- 🛠 [Contribution](#contribution)
|
||||
- ⚖ [Legal](#legal)
|
||||
- © [License](#license)
|
||||
|
||||
## How to use?
|
||||
|
||||
Simply paste the link to one of the playlists into [any video player](https://github.com/iptv-org/awesome-iptv#apps) that supports live streaming and press _Open_.
|
||||
|
||||

|
||||
|
||||
## Playlists
|
||||
|
||||
The main playlist containing all channels available in the repository can be found at:
|
||||
|
||||
```
|
||||
https://iptv-org.github.io/iptv/index.m3u
|
||||
```
|
||||
|
||||
Links to other playlists can be found in the [PLAYLISTS.md](PLAYLISTS.md) file.
|
||||
|
||||
## EPG
|
||||
|
||||
[Electronic Program Guide](https://en.wikipedia.org/wiki/Electronic_program_guide) for most of the channels can be downloaded using utilities published in the [iptv-org/epg](https://github.com/iptv-org/epg) repository.
|
||||
|
||||
## Database
|
||||
|
||||
All channel data is taken from the [iptv-org/database](https://github.com/iptv-org/database) repository. If you find any errors please open a new [issue](https://github.com/iptv-org/database/issues) there.
|
||||
|
||||
## API
|
||||
|
||||
The API documentation can be found in the [iptv-org/api](https://github.com/iptv-org/api) repository.
|
||||
|
||||
## Resources
|
||||
|
||||
Links to other useful IPTV-related resources can be found in the [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv) repository.
|
||||
|
||||
## Discussions
|
||||
|
||||
If you need help finding a channel, have a question or idea, welcome to the [Discussions](https://github.com/orgs/iptv-org/discussions).
|
||||
|
||||
## FAQ
|
||||
|
||||
The answers to the most popular questions can be found in the [FAQ.md](FAQ.md) file.
|
||||
|
||||
## Contribution
|
||||
|
||||
Please make sure to read the [Contributing Guide](CONTRIBUTING.md) before sending an issue or making a pull request.
|
||||
|
||||
And thank you to everyone who has already contributed!
|
||||
|
||||
### Backers
|
||||
|
||||
<a href="https://opencollective.com/iptv-org"><img src="https://opencollective.com/iptv-org/backers.svg?width=890" /></a>
|
||||
|
||||
### Contributors
|
||||
|
||||
<a href="https://github.com/iptv-org/iptv/graphs/contributors"><img src="https://opencollective.com/iptv-org/contributors.svg?width=890" /></a>
|
||||
|
||||
## Legal
|
||||
|
||||
No video files are stored in this repository. The repository simply contains user-submitted links to publicly available video stream URLs, which to the best of our knowledge have been intentionally made publicly by the copyright holders. If any links in these playlists infringe on your rights as a copyright holder, they may be removed by sending a [pull request](https://github.com/iptv-org/iptv/pulls) or opening an [issue](https://github.com/iptv-org/iptv/issues/new?assignees=freearhey&labels=removal+request&template=--removal-request.yml&title=Remove%3A+). However, note that we have **no control** over the destination of the link, and just removing the link from the playlist will not remove its contents from the web. Note that linking does not directly infringe copyright because no copy is made on the site providing the link, and thus this is **not** a valid reason to send a DMCA notice to GitHub. To remove this content from the web, you should contact the web host that's actually hosting the content (**not** GitHub, nor the maintainers of this repository).
|
||||
|
||||
## License
|
||||
|
||||
[](LICENSE)
|
||||
|
||||
@@ -1,56 +1,56 @@
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin'
|
||||
import globals from 'globals'
|
||||
import tsParser from '@typescript-eslint/parser'
|
||||
import path from 'node:path'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import js from '@eslint/js'
|
||||
import stylistic from '@stylistic/eslint-plugin'
|
||||
import { FlatCompat } from '@eslint/eslintrc'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = path.dirname(__filename)
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all
|
||||
})
|
||||
|
||||
export default [
|
||||
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended'),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
'@stylistic': stylistic
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module'
|
||||
},
|
||||
|
||||
rules: {
|
||||
'no-case-declarations': 'off',
|
||||
|
||||
indent: [
|
||||
'error',
|
||||
2,
|
||||
{
|
||||
SwitchCase: 1
|
||||
}
|
||||
],
|
||||
|
||||
'@stylistic/linebreak-style': ['error', 'windows'],
|
||||
quotes: ['error', 'single'],
|
||||
semi: ['error', 'never']
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
ignores: ['tests/__data__/**']
|
||||
}
|
||||
]
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin'
|
||||
import stylistic from '@stylistic/eslint-plugin'
|
||||
import tsParser from '@typescript-eslint/parser'
|
||||
import { FlatCompat } from '@eslint/eslintrc'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import globals from 'globals'
|
||||
import path from 'node:path'
|
||||
import js from '@eslint/js'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = path.dirname(__filename)
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all
|
||||
})
|
||||
|
||||
export default [
|
||||
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended'),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
'@stylistic': stylistic
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module'
|
||||
},
|
||||
|
||||
rules: {
|
||||
'no-case-declarations': 'off',
|
||||
|
||||
indent: [
|
||||
'error',
|
||||
2,
|
||||
{
|
||||
SwitchCase: 1
|
||||
}
|
||||
],
|
||||
|
||||
'@stylistic/linebreak-style': ['error', 'windows'],
|
||||
quotes: ['error', 'single'],
|
||||
semi: ['error', 'never']
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
ignores: ['tests/__data__/**']
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
{
|
||||
"files": ["streams/*.m3u"],
|
||||
"rules": {
|
||||
"no-empty-lines": true,
|
||||
"require-header": true,
|
||||
"attribute-quotes": true,
|
||||
"require-info": true,
|
||||
"require-title": true,
|
||||
"no-trailing-spaces": false,
|
||||
"no-whitespace-before-title": true,
|
||||
"no-multi-spaces": true,
|
||||
"no-extra-comma": true,
|
||||
"space-before-paren": true,
|
||||
"no-dash": true,
|
||||
"require-link": true
|
||||
}
|
||||
}
|
||||
|
||||
15752
package-lock.json
generated
15752
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
164
package.json
164
package.json
@@ -1,80 +1,84 @@
|
||||
{
  "name": "iptv",
  "scripts": {
    "act:check": "act pull_request -W .github/workflows/check.yml",
    "act:format": "act workflow_dispatch -W .github/workflows/format.yml",
    "act:update": "act workflow_dispatch -W .github/workflows/update.yml",
    "api:load": "tsx scripts/commands/api/load.ts",
    "api:generate": "tsx scripts/commands/api/generate.ts",
    "api:deploy": "npx gh-pages-clean && npx gh-pages -a -m \"Deploy to iptv-org/api\" -d .api -r https://$GITHUB_TOKEN@github.com/iptv-org/api.git",
    "playlist:format": "tsx scripts/commands/playlist/format.ts",
    "playlist:update": "tsx scripts/commands/playlist/update.ts",
    "playlist:generate": "tsx scripts/commands/playlist/generate.ts",
    "playlist:validate": "tsx scripts/commands/playlist/validate.ts",
    "playlist:lint": "npx m3u-linter -c m3u-linter.json",
    "playlist:test": "tsx scripts/commands/playlist/test.ts",
    "playlist:edit": "tsx scripts/commands/playlist/edit.ts",
    "playlist:deploy": "npx gh-pages-clean && npx gh-pages -m \"Deploy to GitHub Pages\" -d .gh-pages -r https://$GITHUB_TOKEN@github.com/iptv-org/iptv.git",
    "readme:update": "tsx scripts/commands/readme/update.ts",
    "report:create": "tsx scripts/commands/report/create.ts",
    "check": "npm run playlist:lint && npm run playlist:validate",
    "format": "npm run playlist:format",
    "update": "npm run playlist:generate && npm run api:generate && npm run readme:update",
    "deploy": "npm run playlist:deploy && npm run api:deploy",
    "lint": "npx eslint \"scripts/**/*.{ts,js}\" \"tests/**/*.{ts,js}\"",
    "test": "jest --runInBand",
    "postinstall": "npm run api:load"
  },
  "jest": {
    "transform": {
      "^.+\\.ts$": "@swc/jest"
    },
    "testRegex": "tests/(.*?/)?.*test.ts$",
    "setupFilesAfterEnv": [
      "jest-expect-message"
    ]
  },
  "author": "Arhey",
  "private": true,
  "license": "MIT",
  "dependencies": {
    "@alex_neo/jest-expect-message": "^1.0.5",
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.32.0",
    "@freearhey/core": "^0.10.2",
    "@freearhey/search-js": "^0.1.2",
    "@inquirer/prompts": "^7.8.0",
    "@octokit/core": "^7.0.3",
    "@octokit/plugin-paginate-rest": "^13.1.1",
    "@octokit/plugin-rest-endpoint-methods": "^16.0.0",
    "@octokit/types": "^14.1.0",
    "@stylistic/eslint-plugin": "^5.2.2",
    "@swc/jest": "^0.2.39",
    "@types/async": "^3.2.25",
    "@types/cli-progress": "^3.11.6",
    "@types/fs-extra": "^11.0.4",
    "@types/jest": "^30.0.0",
    "@types/lodash.uniqueid": "^4.0.9",
    "@typescript-eslint/eslint-plugin": "^8.38.0",
    "@typescript-eslint/parser": "^8.38.0",
    "async-es": "^3.2.6",
    "axios": "^1.11.0",
    "chalk": "^5.4.1",
    "cli-progress": "^3.12.0",
    "commander": "^14.0.0",
    "console-table-printer": "^2.14.6",
    "cross-env": "^10.0.0",
    "eslint": "^9.32.0",
    "glob": "^11.0.3",
    "globals": "^16.3.0",
    "iptv-playlist-parser": "^0.15.0",
    "jest": "^30.0.5",
    "jest-expect-message": "^1.1.3",
    "lodash.uniqueid": "^4.0.1",
    "m3u-linter": "^0.4.2",
    "mediainfo.js": "^0.3.6",
    "node-cleanup": "^2.1.2",
    "socks-proxy-agent": "^8.0.5",
    "tsx": "^4.20.3"
  }
}
{
  "name": "iptv",
  "scripts": {
    "act:check": "act pull_request -W .github/workflows/check.yml -s GITHUB_TOKEN=\"$(gh auth token)\"",
    "act:format": "act workflow_dispatch -W .github/workflows/format.yml -s GITHUB_TOKEN=\"$(gh auth token)\"",
    "act:update": "act workflow_dispatch -W .github/workflows/update.yml -s GITHUB_TOKEN=\"$(gh auth token)\"",
    "api:load": "tsx scripts/commands/api/load.ts",
    "api:generate": "tsx scripts/commands/api/generate.ts",
    "api:deploy": "npx gh-pages-clean && npx gh-pages -a -m \"Deploy to iptv-org/api\" -d .api -r https://$GITHUB_TOKEN@github.com/iptv-org/api.git",
    "playlist:format": "tsx scripts/commands/playlist/format.ts",
    "playlist:update": "tsx scripts/commands/playlist/update.ts",
    "playlist:generate": "tsx scripts/commands/playlist/generate.ts",
    "playlist:validate": "tsx scripts/commands/playlist/validate.ts",
    "playlist:lint": "npx m3u-linter -c m3u-linter.json",
    "playlist:test": "tsx scripts/commands/playlist/test.ts",
    "playlist:edit": "tsx scripts/commands/playlist/edit.ts",
    "playlist:deploy": "npx gh-pages-clean && npx gh-pages -m \"Deploy to GitHub Pages\" -d .gh-pages -r https://$GITHUB_TOKEN@github.com/iptv-org/iptv.git",
    "readme:update": "tsx scripts/commands/readme/update.ts",
    "report:create": "tsx scripts/commands/report/create.ts",
    "check": "npm run playlist:lint && npm run playlist:validate",
    "format": "npm run playlist:format",
    "update": "npm run playlist:generate && npm run api:generate && npm run readme:update",
    "deploy": "npm run playlist:deploy && npm run api:deploy",
    "lint": "npx eslint \"scripts/**/*.{ts,js}\" \"tests/**/*.{ts,js}\"",
    "test": "jest --runInBand",
    "postinstall": "npm run api:load"
  },
  "jest": {
    "transform": {
      "^.+\\.ts$": "@swc/jest"
    },
    "testRegex": "tests/(.*?/)?.*test.ts$",
    "setupFilesAfterEnv": [
      "jest-expect-message"
    ]
  },
  "author": "Arhey",
  "private": true,
  "license": "MIT",
  "dependencies": {
    "@alex_neo/jest-expect-message": "^1.0.5",
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.32.0",
    "@freearhey/core": "^0.14.3",
    "@freearhey/search-js": "^0.1.2",
    "@freearhey/storage-js": "^0.1.0",
    "@inquirer/prompts": "^7.8.0",
    "@iptv-org/sdk": "^1.0.2",
    "@octokit/core": "^7.0.3",
    "@octokit/plugin-paginate-rest": "^13.1.1",
    "@octokit/plugin-rest-endpoint-methods": "^16.0.0",
    "@octokit/types": "^14.1.0",
    "@stylistic/eslint-plugin": "^5.2.2",
    "@swc/jest": "^0.2.39",
    "@types/async": "^3.2.25",
    "@types/cli-progress": "^3.11.6",
    "@types/fs-extra": "^11.0.4",
    "@types/jest": "^30.0.0",
    "@types/lodash.uniqueid": "^4.0.9",
    "@types/node-cleanup": "^2.1.5",
    "@typescript-eslint/eslint-plugin": "^8.38.0",
    "@typescript-eslint/parser": "^8.38.0",
    "async": "^3.2.6",
    "axios": "^1.11.0",
    "chalk": "^5.4.1",
    "cli-progress": "^3.12.0",
    "commander": "^14.0.0",
    "console-table-printer": "^2.14.6",
    "cross-env": "^10.0.0",
    "eslint": "^9.32.0",
    "glob": "^11.0.3",
    "globals": "^16.3.0",
    "iptv-playlist-parser": "^0.15.1",
    "jest": "^30.0.5",
    "jest-expect-message": "^1.1.3",
    "lodash.uniqueid": "^4.0.1",
    "m3u-linter": "^0.4.2",
    "mediainfo.js": "^0.3.6",
    "node-cleanup": "^2.1.2",
    "normalize-url": "^8.1.0",
    "socks-proxy-agent": "^8.0.5",
    "tsx": "^4.20.3"
  }
}
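The dependency block above is where the patch's main refactor first shows up: the scripts no longer wire their own DataLoader/DataProcessor pipeline around @freearhey/core, but pull the published @iptv-org/sdk and @freearhey/storage-js packages instead (plus async in place of async-es, a newer @freearhey/core and iptv-playlist-parser, and the new normalize-url). A minimal sketch of what that swap looks like, using only calls that appear elsewhere in this patch; the function names and the from-repo-root import paths are illustrative, not part of the commit:

// Sketch only — contrasts the old and new data-loading flow implied by the
// dependency changes; loadOldStyle/loadNewStyle are illustrative names.
import { Storage } from '@freearhey/core'
import * as sdk from '@iptv-org/sdk'
import { DataLoader, DataProcessor } from './scripts/core'
import { DATA_DIR } from './scripts/constants'

// Before: each command built its own loader + processor.
async function loadOldStyle() {
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data = await loader.load()
  return new DataProcessor().process(data) // channelsKeyById, feedsGroupedByChannelId, ...
}

// After: the SDK owns loading and processing; commands go through scripts/api.ts.
async function loadNewStyle() {
  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })
  await dataManager.loadFromDisk()
  dataManager.processData()
  return dataManager.getProcessedData() // channels, feeds, categories, blocklist, ...
}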
151 scripts/api.ts (new file)
@@ -0,0 +1,151 @@
import { Collection, Dictionary } from '@freearhey/core'
import { DATA_DIR } from './constants'
import cliProgress from 'cli-progress'
import * as sdk from '@iptv-org/sdk'

const data = {
  categoriesKeyById: new Dictionary<sdk.Models.Category>(),
  countriesKeyByCode: new Dictionary<sdk.Models.Country>(),
  subdivisionsKeyByCode: new Dictionary<sdk.Models.Subdivision>(),
  citiesKeyByCode: new Dictionary<sdk.Models.City>(),
  regionsKeyByCode: new Dictionary<sdk.Models.Region>(),
  languagesKeyByCode: new Dictionary<sdk.Models.Language>(),
  channelsKeyById: new Dictionary<sdk.Models.Channel>(),
  feedsKeyByStreamId: new Dictionary<sdk.Models.Feed>(),
  feedsGroupedByChannel: new Dictionary<sdk.Models.Feed[]>(),
  blocklistRecordsGroupedByChannel: new Dictionary<sdk.Models.BlocklistRecord[]>(),
  categories: new Collection<sdk.Models.Category>(),
  countries: new Collection<sdk.Models.Country>(),
  subdivisions: new Collection<sdk.Models.Subdivision>(),
  cities: new Collection<sdk.Models.City>(),
  regions: new Collection<sdk.Models.Region>()
}

let searchIndex

async function loadData() {
  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })
  await dataManager.loadFromDisk()
  dataManager.processData()

  const {
    channels,
    feeds,
    categories,
    languages,
    countries,
    subdivisions,
    cities,
    regions,
    blocklist
  } = dataManager.getProcessedData()

  searchIndex = sdk.SearchEngine.createIndex<sdk.Models.Channel>(channels)

  data.categoriesKeyById = categories.keyBy((category: sdk.Models.Category) => category.id)
  data.countriesKeyByCode = countries.keyBy((country: sdk.Models.Country) => country.code)
  data.subdivisionsKeyByCode = subdivisions.keyBy(
    (subdivision: sdk.Models.Subdivision) => subdivision.code
  )
  data.citiesKeyByCode = cities.keyBy((city: sdk.Models.City) => city.code)
  data.regionsKeyByCode = regions.keyBy((region: sdk.Models.Region) => region.code)
  data.languagesKeyByCode = languages.keyBy((language: sdk.Models.Language) => language.code)
  data.channelsKeyById = channels.keyBy((channel: sdk.Models.Channel) => channel.id)
  data.feedsKeyByStreamId = feeds.keyBy((feed: sdk.Models.Feed) => feed.getStreamId())
  data.feedsGroupedByChannel = feeds.groupBy((feed: sdk.Models.Feed) => feed.channel)
  data.blocklistRecordsGroupedByChannel = blocklist.groupBy(
    (blocklistRecord: sdk.Models.BlocklistRecord) => blocklistRecord.channel
  )
  data.categories = categories
  data.countries = countries
  data.subdivisions = subdivisions
  data.cities = cities
  data.regions = regions
}

async function downloadData() {
  function formatBytes(bytes: number) {
    if (bytes === 0) return '0 B'
    const k = 1024
    const sizes = ['B', 'KB', 'MB', 'GB']
    const i = Math.floor(Math.log(bytes) / Math.log(k))
    return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]
  }

  const files = [
    'blocklist',
    'categories',
    'channels',
    'cities',
    'countries',
    'feeds',
    'guides',
    'languages',
    'logos',
    'regions',
    'streams',
    'subdivisions',
    'timezones'
  ]

  const multiBar = new cliProgress.MultiBar({
    stopOnComplete: true,
    hideCursor: true,
    forceRedraw: true,
    barsize: 36,
    format(options, params, payload) {
      const filename = payload.filename.padEnd(18, ' ')
      const barsize = options.barsize || 40
      const percent = (params.progress * 100).toFixed(2)
      const speed = payload.speed ? formatBytes(payload.speed) + '/s' : 'N/A'
      const total = formatBytes(params.total)
      const completeSize = Math.round(params.progress * barsize)
      const incompleteSize = barsize - completeSize
      const bar =
        options.barCompleteString && options.barIncompleteString
          ? options.barCompleteString.substr(0, completeSize) +
            options.barGlue +
            options.barIncompleteString.substr(0, incompleteSize)
          : '-'.repeat(barsize)

      return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
    }
  })

  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })

  const requests: Promise<unknown>[] = []
  for (const basename of files) {
    const filename = `${basename}.json`
    const progressBar = multiBar.create(0, 0, { filename })
    const request = dataManager.downloadFileToDisk(basename, {
      onDownloadProgress({ total, loaded, rate }) {
        if (total) progressBar.setTotal(total)
        progressBar.update(loaded, { speed: rate })
      }
    })

    requests.push(request)
  }

  await Promise.allSettled(requests).catch(console.error)
}

function searchChannels(query: string): Collection<sdk.Models.Channel> {
  if (!searchIndex) return new Collection<sdk.Models.Channel>()

  const results = searchIndex.search(query)

  const channels = new Collection<sdk.Models.Channel>()

  new Collection<sdk.Types.ChannelSearchableData>(results).forEach(
    (item: sdk.Types.ChannelSearchableData) => {
      const channel = data.channelsKeyById.get(item.id)
      if (channel) channels.add(channel)
    }
  )

  return channels
}

export { data, loadData, downloadData, searchChannels }
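The command diffs that follow replace per-script DataLoader/DataProcessor plumbing with imports from this new module. A short usage sketch, based only on the exports above and on the calls visible in the command diffs below; the query string is illustrative and the relative import path is the one the command scripts use:

// Usage sketch (illustrative): how a command script consumes scripts/api.ts,
// mirroring the calls that appear in the playlist command diffs below.
import { loadData, downloadData, searchChannels, data } from '../../api'
import * as sdk from '@iptv-org/sdk'

async function example() {
  await downloadData() // fetch blocklist.json, channels.json, ... into DATA_DIR
  await loadData()     // parse the files and fill the keyed dictionaries above

  const matches = searchChannels('BBC') // full-text search over the channel index
  matches.forEach((channel: sdk.Models.Channel) => {
    const feeds = data.feedsGroupedByChannel.get(channel.id) // keyed lookup built in loadData()
    console.log(channel.id, feeds)
  })
}

example()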
@@ -1,39 +1,31 @@
|
||||
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { API_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import { Logger, Storage } from '@freearhey/core'
|
||||
import { Stream } from '../../models'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const dataLoader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await dataLoader.load()
|
||||
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage,
|
||||
channelsKeyById,
|
||||
logosGroupedByStreamId,
|
||||
feedsGroupedByChannelId
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
let streams = await parser.parse(files)
|
||||
streams = streams
|
||||
.orderBy((stream: Stream) => stream.getId())
|
||||
.map((stream: Stream) => stream.toJSON())
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
|
||||
logger.info('saving to .api/streams.json...')
|
||||
const apiStorage = new Storage(API_DIR)
|
||||
await apiStorage.save('streams.json', streams.toJSON())
|
||||
}
|
||||
|
||||
main()
|
||||
import { API_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { PlaylistParser } from '../../core'
|
||||
import { Logger } from '@freearhey/core'
|
||||
import { Stream } from '../../models'
|
||||
import { loadData } from '../../api'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
const parsed = await parser.parse(files)
|
||||
const _streams = parsed
|
||||
.sortBy((stream: Stream) => stream.getId())
|
||||
.map((stream: Stream) => stream.toObject())
|
||||
logger.info(`found ${_streams.count()} streams`)
|
||||
|
||||
logger.info('saving to .api/streams.json...')
|
||||
const apiStorage = new Storage(API_DIR)
|
||||
await apiStorage.save('streams.json', _streams.toJSON())
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,26 +1,7 @@
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import { Storage } from '@freearhey/core'
|
||||
import { DataLoader } from '../../core'
|
||||
|
||||
async function main() {
|
||||
const storage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage })
|
||||
|
||||
await Promise.all([
|
||||
loader.download('blocklist.json'),
|
||||
loader.download('categories.json'),
|
||||
loader.download('channels.json'),
|
||||
loader.download('countries.json'),
|
||||
loader.download('languages.json'),
|
||||
loader.download('regions.json'),
|
||||
loader.download('subdivisions.json'),
|
||||
loader.download('feeds.json'),
|
||||
loader.download('logos.json'),
|
||||
loader.download('timezones.json'),
|
||||
loader.download('guides.json'),
|
||||
loader.download('streams.json'),
|
||||
loader.download('cities.json')
|
||||
])
|
||||
}
|
||||
|
||||
main()
|
||||
import { downloadData } from '../../api'
|
||||
|
||||
async function main() {
|
||||
await downloadData()
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,217 +1,190 @@
|
||||
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
|
||||
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
|
||||
import type { ChannelSearchableData } from '../../types/channel'
|
||||
import { Channel, Feed, Playlist, Stream } from '../../models'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { select, input } from '@inquirer/prompts'
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import nodeCleanup from 'node-cleanup'
|
||||
import sjs from '@freearhey/search-js'
|
||||
import { Command } from 'commander'
|
||||
import readline from 'readline'
|
||||
|
||||
type ChoiceValue = { type: string; value?: Feed | Channel }
|
||||
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
readline
|
||||
.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
})
|
||||
.on('SIGINT', function () {
|
||||
process.emit('SIGINT')
|
||||
})
|
||||
}
|
||||
|
||||
const program = new Command()
|
||||
|
||||
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
|
||||
|
||||
const filepath = program.args[0]
|
||||
const logger = new Logger()
|
||||
const storage = new Storage()
|
||||
let parsedStreams = new Collection()
|
||||
|
||||
main(filepath)
|
||||
nodeCleanup(() => {
|
||||
save(filepath)
|
||||
})
|
||||
|
||||
export default async function main(filepath: string) {
|
||||
if (!(await storage.exists(filepath))) {
|
||||
throw new Error(`File "${filepath}" does not exist`)
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const {
|
||||
channels,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId
|
||||
}: DataProcessorData = processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const parser = new PlaylistParser({
|
||||
storage,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
channelsKeyById
|
||||
})
|
||||
parsedStreams = await parser.parseFile(filepath)
|
||||
const streamsWithoutId = parsedStreams.filter((stream: Stream) => !stream.id)
|
||||
|
||||
logger.info(
|
||||
`found ${parsedStreams.count()} streams (including ${streamsWithoutId.count()} without ID)`
|
||||
)
|
||||
|
||||
logger.info('creating search index...')
|
||||
const items = channels.map((channel: Channel) => channel.getSearchable()).all()
|
||||
const searchIndex = sjs.createIndex(items, {
|
||||
searchable: ['name', 'altNames', 'guideNames', 'streamTitles', 'feedFullNames']
|
||||
})
|
||||
|
||||
logger.info('starting...\n')
|
||||
|
||||
for (const stream of streamsWithoutId.all()) {
|
||||
try {
|
||||
stream.id = await selectChannel(stream, searchIndex, feedsGroupedByChannelId, channelsKeyById)
|
||||
} catch (err) {
|
||||
logger.info(err.message)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
streamsWithoutId.forEach((stream: Stream) => {
|
||||
if (stream.id === '-') {
|
||||
stream.id = ''
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function selectChannel(
|
||||
stream: Stream,
|
||||
searchIndex,
|
||||
feedsGroupedByChannelId: Dictionary,
|
||||
channelsKeyById: Dictionary
|
||||
): Promise<string> {
|
||||
const query = escapeRegex(stream.getTitle())
|
||||
const similarChannels = searchIndex
|
||||
.search(query)
|
||||
.map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
|
||||
|
||||
const url = stream.url.length > 50 ? stream.url.slice(0, 50) + '...' : stream.url
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select channel ID for "${stream.title}" (${url}):`,
|
||||
choices: getChannelChoises(new Collection(similarChannels)),
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type': {
|
||||
const typedChannelId = await input({ message: ' Channel ID:' })
|
||||
if (!typedChannelId) return ''
|
||||
const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return typedChannelId
|
||||
return [typedChannelId, selectedFeedId].join('@')
|
||||
}
|
||||
case 'channel': {
|
||||
const selectedChannel = selected.value
|
||||
if (!selectedChannel) return ''
|
||||
const selectedFeedId = await selectFeed(selectedChannel.id, feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return selectedChannel.id
|
||||
return [selectedChannel.id, selectedFeedId].join('@')
|
||||
}
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
|
||||
const channelFeeds = new Collection(feedsGroupedByChannelId.get(channelId))
|
||||
const choices = getFeedChoises(channelFeeds)
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select feed ID for "${channelId}":`,
|
||||
choices,
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type':
|
||||
return await input({ message: ' Feed ID:', default: 'SD' })
|
||||
case 'feed':
|
||||
const selectedFeed = selected.value
|
||||
if (!selectedFeed) return ''
|
||||
return selectedFeed.id
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
function getChannelChoises(channels: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
channels.forEach((channel: Channel) => {
|
||||
const names = new Collection([channel.name, ...channel.altNames.all()]).uniq().join(', ')
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'channel',
|
||||
value: channel
|
||||
},
|
||||
name: `${channel.id} (${names})`,
|
||||
short: `${channel.id}`
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function getFeedChoises(feeds: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
feeds.forEach((feed: Feed) => {
|
||||
let name = `${feed.id} (${feed.name})`
|
||||
if (feed.isMain) name += ' [main]'
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'feed',
|
||||
value: feed
|
||||
},
|
||||
default: feed.isMain,
|
||||
name,
|
||||
short: feed.id
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function save(filepath: string) {
|
||||
if (!storage.existsSync(filepath)) return
|
||||
const playlist = new Playlist(parsedStreams)
|
||||
storage.saveSync(filepath, playlist.toString())
|
||||
logger.info(`\nFile '${filepath}' successfully saved`)
|
||||
}
|
||||
|
||||
function escapeRegex(string: string) {
|
||||
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
|
||||
}
|
||||
import { loadData, data, searchChannels } from '../../api'
|
||||
import { Collection, Logger } from '@freearhey/core'
|
||||
import { select, input } from '@inquirer/prompts'
|
||||
import { Playlist, Stream } from '../../models'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { PlaylistParser } from '../../core'
|
||||
import nodeCleanup from 'node-cleanup'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
import { truncate } from '../../utils'
|
||||
import { Command } from 'commander'
|
||||
import readline from 'readline'
|
||||
|
||||
type ChoiceValue = { type: string; value?: sdk.Models.Feed | sdk.Models.Channel }
|
||||
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
readline
|
||||
.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
})
|
||||
.on('SIGINT', function () {
|
||||
process.emit('SIGINT')
|
||||
})
|
||||
}
|
||||
|
||||
const program = new Command()
|
||||
|
||||
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
|
||||
|
||||
const filepath = program.args[0]
|
||||
const logger = new Logger()
|
||||
const storage = new Storage()
|
||||
let parsedStreams = new Collection<Stream>()
|
||||
|
||||
main(filepath)
|
||||
nodeCleanup(() => {
|
||||
save(filepath)
|
||||
})
|
||||
|
||||
export default async function main(filepath: string) {
|
||||
if (!(await storage.exists(filepath))) {
|
||||
throw new Error(`File "${filepath}" does not exist`)
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const parser = new PlaylistParser({
|
||||
storage
|
||||
})
|
||||
parsedStreams = await parser.parseFile(filepath)
|
||||
const streamsWithoutId = parsedStreams.filter((stream: Stream) => !stream.tvgId)
|
||||
|
||||
logger.info(
|
||||
`found ${parsedStreams.count()} streams (including ${streamsWithoutId.count()} without ID)`
|
||||
)
|
||||
|
||||
logger.info('starting...\n')
|
||||
|
||||
for (const stream of streamsWithoutId.all()) {
|
||||
try {
|
||||
stream.tvgId = await selectChannel(stream)
|
||||
} catch (err) {
|
||||
logger.info(err.message)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
streamsWithoutId.forEach((stream: Stream) => {
|
||||
if (stream.channel === '-') {
|
||||
stream.channel = ''
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function selectChannel(stream: Stream): Promise<string> {
|
||||
const query = escapeRegex(stream.title)
|
||||
const similarChannels = searchChannels(query)
|
||||
const url = truncate(stream.url, 50)
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select channel ID for "${stream.title}" (${url}):`,
|
||||
choices: getChannelChoises(similarChannels),
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type': {
|
||||
const typedChannelId = await input({ message: ' Channel ID:' })
|
||||
if (!typedChannelId) return ''
|
||||
const selectedFeedId = await selectFeed(typedChannelId)
|
||||
if (selectedFeedId === '-') return typedChannelId
|
||||
return [typedChannelId, selectedFeedId].join('@')
|
||||
}
|
||||
case 'channel': {
|
||||
const selectedChannel = selected.value
|
||||
if (!selectedChannel) return ''
|
||||
const selectedFeedId = await selectFeed(selectedChannel.id)
|
||||
if (selectedFeedId === '-') return selectedChannel.id
|
||||
return [selectedChannel.id, selectedFeedId].join('@')
|
||||
}
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
async function selectFeed(channelId: string): Promise<string> {
|
||||
const channelFeeds = new Collection(data.feedsGroupedByChannel.get(channelId))
|
||||
const choices = getFeedChoises(channelFeeds)
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select feed ID for "${channelId}":`,
|
||||
choices,
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type':
|
||||
return await input({ message: ' Feed ID:', default: 'SD' })
|
||||
case 'feed':
|
||||
const selectedFeed = selected.value
|
||||
if (!selectedFeed) return ''
|
||||
return selectedFeed.id
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
function getChannelChoises(channels: Collection<sdk.Models.Channel>): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
channels.forEach((channel: sdk.Models.Channel) => {
|
||||
const names = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'channel',
|
||||
value: channel
|
||||
},
|
||||
name: `${channel.id} (${names})`,
|
||||
short: `${channel.id}`
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function getFeedChoises(feeds: Collection<sdk.Models.Feed>): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
feeds.forEach((feed: sdk.Models.Feed) => {
|
||||
let name = `${feed.id} (${feed.name})`
|
||||
if (feed.is_main) name += ' [main]'
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'feed',
|
||||
value: feed
|
||||
},
|
||||
default: feed.is_main,
|
||||
name,
|
||||
short: feed.id
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function save(filepath: string) {
|
||||
if (!storage.existsSync(filepath)) return
|
||||
const playlist = new Playlist(parsedStreams)
|
||||
storage.saveSync(filepath, playlist.toString())
|
||||
logger.info(`\nFile '${filepath}' successfully saved`)
|
||||
}
|
||||
|
||||
function escapeRegex(string: string) {
|
||||
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
|
||||
}
|
||||
|
||||
@@ -1,78 +1,84 @@
|
||||
import { Logger, Storage } from '@freearhey/core'
|
||||
import { STREAMS_DIR, DATA_DIR } from '../../constants'
|
||||
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
|
||||
import { Stream, Playlist } from '../../models'
|
||||
import { program } from 'commander'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import path from 'node:path'
|
||||
|
||||
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId
|
||||
})
|
||||
let files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
|
||||
files = files.map((filepath: string) => path.basename(filepath))
|
||||
let streams = await parser.parse(files)
|
||||
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
|
||||
logger.info('normalizing links...')
|
||||
streams = streams.map(stream => {
|
||||
stream.normalizeURL()
|
||||
return stream
|
||||
})
|
||||
|
||||
logger.info('removing duplicates...')
|
||||
streams = streams.uniqBy(stream => stream.url)
|
||||
|
||||
logger.info('removing wrong id...')
|
||||
streams = streams.map((stream: Stream) => {
|
||||
if (!stream.channel || channelsKeyById.missing(stream.channel.id)) {
|
||||
stream.id = ''
|
||||
}
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
logger.info('sorting links...')
|
||||
streams = streams.orderBy(
|
||||
[
|
||||
(stream: Stream) => stream.title,
|
||||
(stream: Stream) => stream.getVerticalResolution(),
|
||||
(stream: Stream) => stream.getLabel(),
|
||||
(stream: Stream) => stream.url
|
||||
],
|
||||
['asc', 'desc', 'asc', 'asc']
|
||||
)
|
||||
|
||||
logger.info('saving...')
|
||||
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of groupedStreams.keys()) {
|
||||
const streams = groupedStreams.get(filepath) || []
|
||||
|
||||
if (!streams.length) return
|
||||
|
||||
const playlist = new Playlist(streams, { public: false })
|
||||
await streamsStorage.save(filepath, playlist.toString())
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
import { Collection, Logger } from '@freearhey/core'
|
||||
import { Stream, Playlist } from '../../models'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { STREAMS_DIR } from '../../constants'
|
||||
import { PlaylistParser } from '../../core'
|
||||
import { loadData } from '../../api'
|
||||
import { program } from 'commander'
|
||||
import path from 'node:path'
|
||||
|
||||
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage
|
||||
})
|
||||
let files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
|
||||
files = files.map((filepath: string) => path.basename(filepath))
|
||||
let streams = await parser.parse(files)
|
||||
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
|
||||
logger.info('normalizing links...')
|
||||
streams = streams.map(stream => {
|
||||
stream.normalizeURL()
|
||||
return stream
|
||||
})
|
||||
|
||||
logger.info('removing duplicates...')
|
||||
streams = streams.uniqBy(stream => stream.url)
|
||||
|
||||
logger.info('removing wrong id...')
|
||||
streams = streams.map((stream: Stream) => {
|
||||
const channel = stream.getChannel()
|
||||
if (channel) return stream
|
||||
|
||||
stream.tvgId = ''
|
||||
stream.channel = ''
|
||||
stream.feed = ''
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
logger.info('adding the missing feed id...')
|
||||
streams = streams.map((stream: Stream) => {
|
||||
const feed = stream.getFeed()
|
||||
if (feed) {
|
||||
stream.feed = feed.id
|
||||
stream.tvgId = stream.getId()
|
||||
}
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
logger.info('sorting links...')
|
||||
streams = streams.sortBy(
|
||||
[
|
||||
(stream: Stream) => stream.title,
|
||||
(stream: Stream) => stream.getVerticalResolution(),
|
||||
(stream: Stream) => stream.label,
|
||||
(stream: Stream) => stream.url
|
||||
],
|
||||
['asc', 'desc', 'asc', 'asc']
|
||||
)
|
||||
|
||||
logger.info('saving...')
|
||||
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of groupedStreams.keys()) {
|
||||
const streams = new Collection(groupedStreams.get(filepath))
|
||||
|
||||
if (streams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(streams, { public: false })
|
||||
await streamsStorage.save(filepath, playlist.toString())
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,131 +1,115 @@
|
||||
import { PlaylistParser, DataProcessor, DataLoader } from '../../core'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { DATA_DIR, LOGS_DIR, STREAMS_DIR } from '../../constants'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import { Logger, Storage, File } from '@freearhey/core'
|
||||
import { Stream } from '../../models'
|
||||
import uniqueId from 'lodash.uniqueid'
|
||||
import {
|
||||
IndexCategoryGenerator,
|
||||
IndexLanguageGenerator,
|
||||
IndexCountryGenerator,
|
||||
SubdivisionsGenerator,
|
||||
CategoriesGenerator,
|
||||
CountriesGenerator,
|
||||
LanguagesGenerator,
|
||||
RegionsGenerator,
|
||||
SourcesGenerator,
|
||||
CitiesGenerator,
|
||||
IndexGenerator,
|
||||
RawGenerator
|
||||
} from '../../generators'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
const logFile = new File('generators.log')
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const {
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
channelsKeyById,
|
||||
subdivisions,
|
||||
categories,
|
||||
countries,
|
||||
regions,
|
||||
cities
|
||||
}: DataProcessorData = processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
channelsKeyById
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
let streams = await parser.parse(files)
|
||||
const totalStreams = streams.count()
|
||||
logger.info(`found ${totalStreams} streams`)
|
||||
|
||||
logger.info('generating raw/...')
|
||||
await new RawGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('filtering streams...')
|
||||
streams = streams.uniqBy((stream: Stream) =>
|
||||
stream.hasId() ? stream.getChannelId() + stream.getFeedId() : uniqueId()
|
||||
)
|
||||
|
||||
logger.info('sorting streams...')
|
||||
streams = streams.orderBy(
|
||||
[
|
||||
(stream: Stream) => stream.getId(),
|
||||
(stream: Stream) => stream.getVerticalResolution(),
|
||||
(stream: Stream) => stream.getLabel()
|
||||
],
|
||||
['asc', 'asc', 'desc']
|
||||
)
|
||||
|
||||
logger.info('generating categories/...')
|
||||
await new CategoriesGenerator({ categories, streams, logFile }).generate()
|
||||
|
||||
logger.info('generating languages/...')
|
||||
await new LanguagesGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating countries/...')
|
||||
await new CountriesGenerator({
|
||||
countries,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating subdivisions/...')
|
||||
await new SubdivisionsGenerator({
|
||||
subdivisions,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating cities/...')
|
||||
await new CitiesGenerator({
|
||||
cities,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating regions/...')
|
||||
await new RegionsGenerator({
|
||||
streams,
|
||||
regions,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating sources/...')
|
||||
await new SourcesGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.m3u...')
|
||||
await new IndexGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.category.m3u...')
|
||||
await new IndexCategoryGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.country.m3u...')
|
||||
await new IndexCountryGenerator({
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating index.language.m3u...')
|
||||
await new IndexLanguageGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('saving generators.log...')
|
||||
const logStorage = new Storage(LOGS_DIR)
|
||||
logStorage.saveFile(logFile)
|
||||
}
|
||||
|
||||
main()
|
||||
import { LOGS_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { Storage, File } from '@freearhey/storage-js'
|
||||
import { PlaylistParser } from '../../core'
|
||||
import { loadData, data } from '../../api'
|
||||
import { Logger } from '@freearhey/core'
|
||||
import uniqueId from 'lodash.uniqueid'
|
||||
import { Stream } from '../../models'
|
||||
import {
|
||||
IndexCategoryGenerator,
|
||||
IndexLanguageGenerator,
|
||||
IndexCountryGenerator,
|
||||
SubdivisionsGenerator,
|
||||
CategoriesGenerator,
|
||||
CountriesGenerator,
|
||||
LanguagesGenerator,
|
||||
RegionsGenerator,
|
||||
SourcesGenerator,
|
||||
CitiesGenerator,
|
||||
IndexGenerator,
|
||||
RawGenerator
|
||||
} from '../../generators'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
const logFile = new File('generators.log')
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
let streams = await parser.parse(files)
|
||||
const totalStreams = streams.count()
|
||||
logger.info(`found ${totalStreams} streams`)
|
||||
|
||||
logger.info('generating raw/...')
|
||||
await new RawGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('filtering streams...')
|
||||
streams = streams.uniqBy((stream: Stream) => stream.getId() || uniqueId())
|
||||
|
||||
logger.info('sorting streams...')
|
||||
streams = streams.sortBy(
|
||||
[
|
||||
(stream: Stream) => stream.getId(),
|
||||
(stream: Stream) => stream.getVerticalResolution(),
|
||||
(stream: Stream) => stream.label
|
||||
],
|
||||
['asc', 'asc', 'desc']
|
||||
)
|
||||
|
||||
const { categories, countries, subdivisions, cities, regions } = data
|
||||
|
||||
logger.info('generating categories/...')
|
||||
await new CategoriesGenerator({ categories, streams, logFile }).generate()
|
||||
|
||||
logger.info('generating languages/...')
|
||||
await new LanguagesGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating countries/...')
|
||||
await new CountriesGenerator({
|
||||
countries,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating subdivisions/...')
|
||||
await new SubdivisionsGenerator({
|
||||
subdivisions,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating cities/...')
|
||||
await new CitiesGenerator({
|
||||
cities,
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating regions/...')
|
||||
await new RegionsGenerator({
|
||||
streams,
|
||||
regions,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating sources/...')
|
||||
await new SourcesGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.m3u...')
|
||||
await new IndexGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.category.m3u...')
|
||||
await new IndexCategoryGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('generating index.country.m3u...')
|
||||
await new IndexCountryGenerator({
|
||||
streams,
|
||||
logFile
|
||||
}).generate()
|
||||
|
||||
logger.info('generating index.language.m3u...')
|
||||
await new IndexLanguageGenerator({ streams, logFile }).generate()
|
||||
|
||||
logger.info('saving generators.log...')
|
||||
const logStorage = new Storage(LOGS_DIR)
|
||||
logStorage.saveFile(logFile)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,182 +1,177 @@
|
||||
import { Logger, Storage, Collection } from '@freearhey/core'
|
||||
import { ROOT_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
|
||||
import { PlaylistParser, StreamTester, CliTable, DataProcessor, DataLoader } from '../../core'
|
||||
import type { TestResult } from '../../core/streamTester'
|
||||
import { Stream } from '../../models'
|
||||
import { program, OptionValues } from 'commander'
|
||||
import { eachLimit } from 'async-es'
|
||||
import chalk from 'chalk'
|
||||
import os from 'node:os'
|
||||
import dns from 'node:dns'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
|
||||
const LIVE_UPDATE_INTERVAL = 5000
|
||||
const LIVE_UPDATE_MAX_STREAMS = 100
|
||||
|
||||
let errors = 0
|
||||
let warnings = 0
|
||||
const results: { [key: string]: string } = {}
|
||||
let interval: string | number | NodeJS.Timeout | undefined
|
||||
let streams = new Collection()
|
||||
let isLiveUpdateEnabled = true
|
||||
|
||||
program
|
||||
.argument('[filepath...]', 'Path to file to test')
|
||||
.option(
|
||||
'-p, --parallel <number>',
|
||||
'Batch size of streams to test concurrently',
|
||||
(value: string) => parseInt(value),
|
||||
os.cpus().length
|
||||
)
|
||||
.option('-x, --proxy <url>', 'Use the specified proxy')
|
||||
.option(
|
||||
'-t, --timeout <number>',
|
||||
'The number of milliseconds before the request will be aborted',
|
||||
(value: string) => parseInt(value),
|
||||
30000
|
||||
)
|
||||
.parse(process.argv)
|
||||
|
||||
const options: OptionValues = program.opts()
|
||||
|
||||
const logger = new Logger()
|
||||
const tester = new StreamTester({ options })
|
||||
|
||||
async function main() {
|
||||
if (await isOffline()) {
|
||||
logger.error(chalk.red('Internet connection is required for the script to work'))
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const rootStorage = new Storage(ROOT_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: rootStorage,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId
|
||||
})
|
||||
const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
|
||||
streams = await parser.parse(files)
|
||||
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false
|
||||
|
||||
logger.info('starting...')
|
||||
if (!isLiveUpdateEnabled) {
|
||||
drawTable()
|
||||
interval = setInterval(() => {
|
||||
drawTable()
|
||||
}, LIVE_UPDATE_INTERVAL)
|
||||
}
|
||||
|
||||
await eachLimit(
|
||||
streams.all(),
|
||||
options.parallel,
|
||||
async (stream: Stream) => {
|
||||
await runTest(stream)
|
||||
|
||||
if (isLiveUpdateEnabled) {
|
||||
drawTable()
|
||||
}
|
||||
},
|
||||
onFinish
|
||||
)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function runTest(stream: Stream) {
|
||||
const key = stream.filepath + stream.getId() + stream.url
|
||||
results[key] = chalk.white('LOADING...')
|
||||
|
||||
const result: TestResult = await tester.test(stream)
|
||||
|
||||
let status = ''
|
||||
const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
|
||||
if (result.status.ok) status = chalk.green('OK')
|
||||
else if (errorStatusCodes.includes(result.status.code)) {
|
||||
status = chalk.red(result.status.code)
|
||||
errors++
|
||||
} else {
|
||||
status = chalk.yellow(result.status.code)
|
||||
warnings++
|
||||
}
|
||||
|
||||
results[key] = status
|
||||
}
|
||||
|
||||
function drawTable() {
|
||||
process.stdout.write('\u001b[3J\u001b[1J')
|
||||
console.clear()
|
||||
|
||||
const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
|
||||
for (const filepath of streamsGrouped.keys()) {
|
||||
const streams: Stream[] = streamsGrouped.get(filepath)
|
||||
|
||||
const table = new CliTable({
|
||||
columns: [
|
||||
{ name: '', alignment: 'center', minLen: 3, maxLen: 3 },
|
||||
{ name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
|
||||
{ name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
|
||||
{ name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
|
||||
]
|
||||
})
|
||||
streams.forEach((stream: Stream, index: number) => {
|
||||
const status = results[stream.filepath + stream.getId() + stream.url] || chalk.gray('PENDING')
|
||||
|
||||
const row = {
|
||||
'': index,
|
||||
'tvg-id': stream.getId().length > 25 ? stream.getId().slice(0, 22) + '...' : stream.getId(),
|
||||
url: stream.url.length > 100 ? stream.url.slice(0, 97) + '...' : stream.url,
|
||||
status
|
||||
}
|
||||
table.append(row)
|
||||
})
|
||||
|
||||
process.stdout.write(`\n${chalk.underline(filepath)}\n`)
|
||||
|
||||
process.stdout.write(table.toString())
|
||||
}
|
||||
}
|
||||
|
||||
function onFinish(error: any) {
|
||||
clearInterval(interval)
|
||||
|
||||
if (error) {
|
||||
console.error(error)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
drawTable()
|
||||
|
||||
if (errors > 0 || warnings > 0) {
|
||||
console.log(
|
||||
chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
|
||||
)
|
||||
|
||||
if (errors > 0) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
async function isOffline() {
|
||||
return new Promise((resolve, reject) => {
|
||||
dns.lookup('info.cern.ch', err => {
|
||||
if (err) resolve(true)
|
||||
reject(false)
|
||||
})
|
||||
}).catch(() => {})
|
||||
}
|
||||
import { PlaylistParser, StreamTester, CliTable } from '../../core'
|
||||
import type { TestResult } from '../../core/streamTester'
|
||||
import { ROOT_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { Logger, Collection } from '@freearhey/core'
|
||||
import { program, OptionValues } from 'commander'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { Stream } from '../../models'
|
||||
import { loadData } from '../../api'
|
||||
import { eachLimit } from 'async'
|
||||
import dns from 'node:dns'
|
||||
import chalk from 'chalk'
|
||||
import os from 'node:os'
|
||||
import { truncate } from '../../utils'
|
||||
|
||||
const LIVE_UPDATE_INTERVAL = 5000
|
||||
const LIVE_UPDATE_MAX_STREAMS = 100
|
||||
|
||||
let errors = 0
|
||||
let warnings = 0
|
||||
const results: { [key: string]: string } = {}
|
||||
let interval: string | number | NodeJS.Timeout | undefined
|
||||
let streams = new Collection<Stream>()
|
||||
let isLiveUpdateEnabled = true
|
||||
|
||||
program
|
||||
.argument('[filepath...]', 'Path to file to test')
|
||||
.option(
|
||||
'-p, --parallel <number>',
|
||||
'Batch size of streams to test concurrently',
|
||||
(value: string) => parseInt(value),
|
||||
os.cpus().length
|
||||
)
|
||||
.option('-x, --proxy <url>', 'Use the specified proxy')
|
||||
.option(
|
||||
'-t, --timeout <number>',
|
||||
'The number of milliseconds before the request will be aborted',
|
||||
(value: string) => parseInt(value),
|
||||
30000
|
||||
)
|
||||
.parse(process.argv)
|
||||
|
||||
const options: OptionValues = program.opts()
|
||||
|
||||
const logger = new Logger()
|
||||
const tester = new StreamTester({ options })
|
||||
|
||||
async function main() {
|
||||
if (await isOffline()) {
|
||||
logger.error(chalk.red('Internet connection is required for the script to work'))
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const rootStorage = new Storage(ROOT_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: rootStorage
|
||||
})
|
||||
const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
|
||||
streams = await parser.parse(files)
|
||||
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false
|
||||
|
||||
logger.info('starting...')
|
||||
if (!isLiveUpdateEnabled) {
|
||||
drawTable()
|
||||
interval = setInterval(() => {
|
||||
drawTable()
|
||||
}, LIVE_UPDATE_INTERVAL)
|
||||
}
|
||||
|
||||
eachLimit(
|
||||
streams.all(),
|
||||
options.parallel,
|
||||
async (stream: Stream) => {
|
||||
await runTest(stream)
|
||||
|
||||
if (isLiveUpdateEnabled) {
|
||||
drawTable()
|
||||
}
|
||||
},
|
||||
onFinish
|
||||
)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function runTest(stream: Stream) {
|
||||
const key = stream.getUniqKey()
|
||||
results[key] = chalk.white('LOADING...')
|
||||
|
||||
const result: TestResult = await tester.test(stream)
|
||||
|
||||
let status = ''
|
||||
const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
|
||||
if (result.status.ok) status = chalk.green('OK')
|
||||
else if (errorStatusCodes.includes(result.status.code)) {
|
||||
status = chalk.red(result.status.code)
|
||||
errors++
|
||||
} else {
|
||||
status = chalk.yellow(result.status.code)
|
||||
warnings++
|
||||
}
|
||||
|
||||
results[key] = status
|
||||
}
|
||||
|
||||
function drawTable() {
|
||||
process.stdout.write('\u001b[3J\u001b[1J')
|
||||
console.clear()
|
||||
|
||||
const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
|
||||
for (const filepath of streamsGrouped.keys()) {
|
||||
const streams: Stream[] = streamsGrouped.get(filepath) || []
|
||||
|
||||
const table = new CliTable({
|
||||
columns: [
|
||||
{ name: '', alignment: 'center', minLen: 3, maxLen: 3 },
|
||||
{ name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
|
||||
{ name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
|
||||
{ name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
|
||||
]
|
||||
})
|
||||
streams.forEach((stream: Stream, index: number) => {
|
||||
const key = stream.getUniqKey()
|
||||
const status = results[key] || chalk.gray('PENDING')
|
||||
const tvgId = stream.getTvgId()
|
||||
|
||||
const row = {
|
||||
'': index,
|
||||
'tvg-id': truncate(tvgId, 25),
|
||||
url: truncate(stream.url, 100),
|
||||
status
|
||||
}
|
||||
table.append(row)
|
||||
})
|
||||
|
||||
process.stdout.write(`\n${chalk.underline(filepath)}\n`)
|
||||
|
||||
process.stdout.write(table.toString())
|
||||
}
|
||||
}
|
||||
|
||||
function onFinish(error: Error) {
|
||||
clearInterval(interval)
|
||||
|
||||
if (error) {
|
||||
console.error(error)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
drawTable()
|
||||
|
||||
if (errors > 0 || warnings > 0) {
|
||||
console.log(
|
||||
chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
|
||||
)
|
||||
|
||||
if (errors > 0) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
async function isOffline() {
|
||||
return new Promise((resolve, reject) => {
|
||||
dns.lookup('info.cern.ch', err => {
|
||||
if (err) resolve(true)
|
||||
reject(false)
|
||||
})
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
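Both the old and new versions of the connectivity check above resolve true on a DNS failure and then unconditionally call reject(false), relying on the trailing .catch(() => {}) to turn the rejection into undefined; that happens to read as "online" only because undefined is falsy. A hedged sketch of a more direct version, not part of this patch:

// Sketch only — resolves true when the DNS lookup fails (treated as "offline")
// and false otherwise, with no rejection path that needs to be swallowed.
import dns from 'node:dns'

async function isOffline(): Promise<boolean> {
  return new Promise<boolean>(resolve => {
    dns.lookup('info.cern.ch', err => resolve(Boolean(err)))
  })
}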
@@ -1,194 +1,174 @@
|
||||
import { DataLoader, DataProcessor, IssueLoader, PlaylistParser } from '../../core'
|
||||
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { Stream, Playlist, Channel, Issue } from '../../models'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import { DATA_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { isURI } from '../../utils'
|
||||
|
||||
const processedIssues = new Collection()
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger({ level: -999 })
|
||||
const issueLoader = new IssueLoader()
|
||||
|
||||
logger.info('loading issues...')
|
||||
const issues = await issueLoader.load()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const dataLoader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await dataLoader.load()
|
||||
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
channelsKeyById
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
|
||||
logger.info('removing streams...')
|
||||
await removeStreams({ streams, issues })
|
||||
|
||||
logger.info('edit stream description...')
|
||||
await editStreams({
|
||||
streams,
|
||||
issues,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId
|
||||
})
|
||||
|
||||
logger.info('add new streams...')
|
||||
await addStreams({
|
||||
streams,
|
||||
issues,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId
|
||||
})
|
||||
|
||||
logger.info('saving...')
|
||||
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of groupedStreams.keys()) {
|
||||
let streams = groupedStreams.get(filepath) || []
|
||||
streams = streams.filter((stream: Stream) => stream.removed === false)
|
||||
|
||||
const playlist = new Playlist(streams, { public: false })
|
||||
await streamsStorage.save(filepath, playlist.toString())
|
||||
}
|
||||
|
||||
const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
|
||||
console.log(`OUTPUT=${output}`)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function removeStreams({ streams, issues }: { streams: Collection; issues: Collection }) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
|
||||
)
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
if (data.missing('streamUrl')) return
|
||||
|
||||
const streamUrls = data.getString('streamUrl') || ''
|
||||
|
||||
let changed = false
|
||||
streamUrls
|
||||
.split(/\r?\n/)
|
||||
.filter(Boolean)
|
||||
.forEach(link => {
|
||||
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
|
||||
if (found) {
|
||||
found.removed = true
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
|
||||
if (changed) processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
|
||||
async function editStreams({
|
||||
streams,
|
||||
issues,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId
|
||||
}: {
|
||||
streams: Collection
|
||||
issues: Collection
|
||||
channelsKeyById: Dictionary
|
||||
feedsGroupedByChannelId: Dictionary
|
||||
}) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
|
||||
)
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
|
||||
if (data.missing('streamUrl')) return
|
||||
|
||||
const stream: Stream = streams.first(
|
||||
(_stream: Stream) => _stream.url === data.getString('streamUrl')
|
||||
)
|
||||
if (!stream) return
|
||||
|
||||
const streamId = data.getString('streamId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
if (channelId) {
|
||||
stream
|
||||
.setChannelId(channelId)
|
||||
.setFeedId(feedId)
|
||||
.withChannel(channelsKeyById)
|
||||
.withFeed(feedsGroupedByChannelId)
|
||||
.updateId()
|
||||
.updateTitle()
|
||||
.updateFilepath()
|
||||
}
|
||||
|
||||
stream.update(data)
|
||||
|
||||
processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
|
||||
async function addStreams({
|
||||
streams,
|
||||
issues,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId
|
||||
}: {
|
||||
streams: Collection
|
||||
issues: Collection
|
||||
channelsKeyById: Dictionary
|
||||
feedsGroupedByChannelId: Dictionary
|
||||
}) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
|
||||
)
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
if (data.missing('streamId') || data.missing('streamUrl')) return
|
||||
if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
|
||||
const streamUrl = data.getString('streamUrl') || ''
|
||||
if (!isURI(streamUrl)) return
|
||||
|
||||
const streamId = data.getString('streamId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
const channel: Channel = channelsKeyById.get(channelId)
|
||||
if (!channel) return
|
||||
|
||||
const label = data.getString('label') || null
|
||||
const quality = data.getString('quality') || null
|
||||
const httpUserAgent = data.getString('httpUserAgent') || null
|
||||
const httpReferrer = data.getString('httpReferrer') || null
|
||||
const directives = data.getArray('directives') || []
|
||||
|
||||
const stream = new Stream({
|
||||
channelId,
|
||||
feedId,
|
||||
title: channel.name,
|
||||
url: streamUrl,
|
||||
userAgent: httpUserAgent,
|
||||
referrer: httpReferrer,
|
||||
directives,
|
||||
quality,
|
||||
label
|
||||
})
|
||||
.withChannel(channelsKeyById)
|
||||
.withFeed(feedsGroupedByChannelId)
|
||||
.updateTitle()
|
||||
.updateFilepath()
|
||||
|
||||
streams.add(stream)
|
||||
processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
import { IssueLoader, PlaylistParser } from '../../core'
|
||||
import { Playlist, Issue, Stream } from '../../models'
|
||||
import { loadData, data as apiData } from '../../api'
|
||||
import { Logger, Collection } from '@freearhey/core'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { STREAMS_DIR } from '../../constants'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
import { isURI } from '../../utils'
|
||||
|
||||
const processedIssues = new Collection()
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger({ level: -999 })
|
||||
const issueLoader = new IssueLoader()
|
||||
|
||||
logger.info('loading issues...')
|
||||
const issues = await issueLoader.load()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
|
||||
logger.info('removing streams...')
|
||||
await removeStreams({ streams, issues })
|
||||
|
||||
logger.info('edit stream description...')
|
||||
await editStreams({
|
||||
streams,
|
||||
issues
|
||||
})
|
||||
|
||||
logger.info('add new streams...')
|
||||
await addStreams({
|
||||
streams,
|
||||
issues
|
||||
})
|
||||
|
||||
logger.info('saving...')
|
||||
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of groupedStreams.keys()) {
|
||||
let streams = new Collection(groupedStreams.get(filepath))
|
||||
streams = streams.filter((stream: Stream) => stream.removed === false)
|
||||
|
||||
const playlist = new Playlist(streams, { public: false })
|
||||
await streamsStorage.save(filepath, playlist.toString())
|
||||
}
|
||||
|
||||
const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
|
||||
console.log(`OUTPUT=${output}`)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function removeStreams({
|
||||
streams,
|
||||
issues
|
||||
}: {
|
||||
streams: Collection<Stream>
|
||||
issues: Collection<Issue>
|
||||
}) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
|
||||
)
|
||||
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
if (data.missing('streamUrl')) return
|
||||
|
||||
const streamUrls = data.getString('streamUrl') || ''
|
||||
|
||||
let changed = false
|
||||
streamUrls
|
||||
.split(/\r?\n/)
|
||||
.filter(Boolean)
|
||||
.forEach(link => {
|
||||
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
|
||||
if (found) {
|
||||
found.removed = true
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
|
||||
if (changed) processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
|
||||
async function editStreams({
|
||||
streams,
|
||||
issues
|
||||
}: {
|
||||
streams: Collection<Stream>
|
||||
issues: Collection<Issue>
|
||||
}) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
|
||||
)
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
|
||||
if (data.missing('streamUrl')) return
|
||||
|
||||
const stream: Stream = streams.first(
|
||||
(_stream: Stream) => _stream.url === data.getString('streamUrl')
|
||||
)
|
||||
if (!stream) return
|
||||
|
||||
const streamId = data.getString('streamId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
if (channelId) {
|
||||
stream.channel = channelId
|
||||
stream.feed = feedId
|
||||
stream.updateTvgId().updateTitle().updateFilepath()
|
||||
}
|
||||
|
||||
stream.updateWithIssue(data)
|
||||
|
||||
processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
|
||||
async function addStreams({
|
||||
streams,
|
||||
issues
|
||||
}: {
|
||||
streams: Collection<Stream>
|
||||
issues: Collection<Issue>
|
||||
}) {
|
||||
const requests = issues.filter(
|
||||
issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
|
||||
)
|
||||
requests.forEach((issue: Issue) => {
|
||||
const data = issue.data
|
||||
if (data.missing('streamId') || data.missing('streamUrl')) return
|
||||
if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
|
||||
const streamUrl = data.getString('streamUrl') || ''
|
||||
if (!isURI(streamUrl)) return
|
||||
|
||||
const streamId = data.getString('streamId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
const channel: sdk.Models.Channel | undefined = apiData.channelsKeyById.get(channelId)
|
||||
if (!channel) return
|
||||
|
||||
const label = data.getString('label') || ''
|
||||
const quality = data.getString('quality') || null
|
||||
const httpUserAgent = data.getString('httpUserAgent') || null
|
||||
const httpReferrer = data.getString('httpReferrer') || null
|
||||
const directives = data.getArray('directives') || []
|
||||
|
||||
const stream = new Stream({
|
||||
channel: channelId,
|
||||
feed: feedId,
|
||||
title: channel.name,
|
||||
url: streamUrl,
|
||||
user_agent: httpUserAgent,
|
||||
referrer: httpReferrer,
|
||||
quality
|
||||
})
|
||||
|
||||
stream.label = label
|
||||
stream.setDirectives(directives).updateTitle().updateFilepath()
|
||||
|
||||
streams.add(stream)
|
||||
processedIssues.add(issue.number)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,129 +1,120 @@
|
||||
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
|
||||
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { DATA_DIR, ROOT_DIR } from '../../constants'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { BlocklistRecord, Stream } from '../../models'
|
||||
import { program } from 'commander'
|
||||
import chalk from 'chalk'
|
||||
|
||||
program.argument('[filepath...]', 'Path to file to validate').parse(process.argv)
|
||||
|
||||
type LogItem = {
|
||||
type: string
|
||||
line: number
|
||||
message: string
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const {
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
blocklistRecordsGroupedByChannelId
|
||||
}: DataProcessorData = processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const rootStorage = new Storage(ROOT_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: rootStorage,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId
|
||||
})
|
||||
const files = program.args.length ? program.args : await rootStorage.list('streams/**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
|
||||
let errors = new Collection()
|
||||
let warnings = new Collection()
|
||||
const streamsGroupedByFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of streamsGroupedByFilepath.keys()) {
|
||||
const streams = streamsGroupedByFilepath.get(filepath)
|
||||
if (!streams) continue
|
||||
|
||||
const log = new Collection()
|
||||
const buffer = new Dictionary()
|
||||
streams.forEach((stream: Stream) => {
|
||||
if (stream.channelId) {
|
||||
const channel = channelsKeyById.get(stream.channelId)
|
||||
if (!channel) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.getLine(),
|
||||
message: `"${stream.id}" is not in the database`
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const duplicate = stream.url && buffer.has(stream.url)
|
||||
if (duplicate) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.getLine(),
|
||||
message: `"${stream.url}" is already on the playlist`
|
||||
})
|
||||
} else {
|
||||
buffer.set(stream.url, true)
|
||||
}
|
||||
|
||||
const blocklistRecords = stream.channel
|
||||
? new Collection(blocklistRecordsGroupedByChannelId.get(stream.channel.id))
|
||||
: new Collection()
|
||||
|
||||
blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
|
||||
if (blocklistRecord.reason === 'dmca') {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: stream.getLine(),
|
||||
message: `"${blocklistRecord.channelId}" is on the blocklist due to claims of copyright holders (${blocklistRecord.ref})`
|
||||
})
|
||||
} else if (blocklistRecord.reason === 'nsfw') {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: stream.getLine(),
|
||||
message: `"${blocklistRecord.channelId}" is on the blocklist due to NSFW content (${blocklistRecord.ref})`
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
if (log.notEmpty()) {
|
||||
console.log(`\n${chalk.underline(filepath)}`)
|
||||
|
||||
log.forEach((logItem: LogItem) => {
|
||||
const position = logItem.line.toString().padEnd(6, ' ')
|
||||
const type = logItem.type.padEnd(9, ' ')
|
||||
const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
|
||||
|
||||
console.log(` ${chalk.gray(position)}${status}${logItem.message}`)
|
||||
})
|
||||
|
||||
errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
|
||||
warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.count() || warnings.count()) {
|
||||
console.log(
|
||||
chalk.red(
|
||||
`\n${
|
||||
errors.count() + warnings.count()
|
||||
} problems (${errors.count()} errors, ${warnings.count()} warnings)`
|
||||
)
|
||||
)
|
||||
|
||||
if (errors.count()) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
import { Logger, Collection, Dictionary } from '@freearhey/core'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { PlaylistParser } from '../../core'
|
||||
import { data, loadData } from '../../api'
|
||||
import { ROOT_DIR } from '../../constants'
|
||||
import { Stream } from '../../models'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
import { program } from 'commander'
|
||||
import chalk from 'chalk'
|
||||
|
||||
program.argument('[filepath...]', 'Path to file to validate').parse(process.argv)
|
||||
|
||||
type LogItem = {
|
||||
type: string
|
||||
line: number
|
||||
message: string
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const rootStorage = new Storage(ROOT_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: rootStorage
|
||||
})
|
||||
const files = program.args.length ? program.args : await rootStorage.list('streams/**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
logger.info(`found ${streams.count()} streams`)
|
||||
|
||||
let errors = new Collection()
|
||||
let warnings = new Collection()
|
||||
const streamsGroupedByFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
|
||||
for (const filepath of streamsGroupedByFilepath.keys()) {
|
||||
const streams = streamsGroupedByFilepath.get(filepath)
|
||||
if (!streams) continue
|
||||
|
||||
const log = new Collection<LogItem>()
|
||||
const buffer = new Dictionary<boolean>()
|
||||
streams.forEach((stream: Stream) => {
|
||||
if (stream.channel) {
|
||||
const channel = data.channelsKeyById.get(stream.channel)
|
||||
if (!channel) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.getLine(),
|
||||
message: `"${stream.tvgId}" is not in the database`
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const duplicate = stream.url && buffer.has(stream.url)
|
||||
if (duplicate) {
|
||||
log.add({
|
||||
type: 'warning',
|
||||
line: stream.getLine(),
|
||||
message: `"${stream.url}" is already on the playlist`
|
||||
})
|
||||
} else {
|
||||
buffer.set(stream.url, true)
|
||||
}
|
||||
|
||||
if (stream.channel) {
|
||||
const blocklistRecords = new Collection(
|
||||
data.blocklistRecordsGroupedByChannel.get(stream.channel)
|
||||
)
|
||||
|
||||
blocklistRecords.forEach((blocklistRecord: sdk.Models.BlocklistRecord) => {
|
||||
if (blocklistRecord.reason === 'dmca') {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: stream.getLine(),
|
||||
message: `"${blocklistRecord.channel}" is on the blocklist due to claims of copyright holders (${blocklistRecord.ref})`
|
||||
})
|
||||
} else if (blocklistRecord.reason === 'nsfw') {
|
||||
log.add({
|
||||
type: 'error',
|
||||
line: stream.getLine(),
|
||||
message: `"${blocklistRecord.channel}" is on the blocklist due to NSFW content (${blocklistRecord.ref})`
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (log.isNotEmpty()) {
|
||||
console.log(`\n${chalk.underline(filepath)}`)
|
||||
|
||||
log.forEach((logItem: LogItem) => {
|
||||
const position = logItem.line.toString().padEnd(6, ' ')
|
||||
const type = logItem.type.padEnd(9, ' ')
|
||||
const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
|
||||
|
||||
console.log(` ${chalk.gray(position)}${status}${logItem.message}`)
|
||||
})
|
||||
|
||||
errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
|
||||
warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.count() || warnings.count()) {
|
||||
console.log(
|
||||
chalk.red(
|
||||
`\n${
|
||||
errors.count() + warnings.count()
|
||||
} problems (${errors.count()} errors, ${warnings.count()} warnings)`
|
||||
)
|
||||
)
|
||||
|
||||
if (errors.count()) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,48 +1,30 @@
|
||||
import { CategoriesTable, CountriesTable, LanguagesTable, RegionsTable } from '../../tables'
|
||||
import { DataLoader, DataProcessor, Markdown } from '../../core'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { README_DIR, DATA_DIR, ROOT_DIR } from '../../constants'
|
||||
import { Logger, Storage } from '@freearhey/core'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const processor = new DataProcessor()
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const {
|
||||
subdivisionsKeyByCode,
|
||||
languagesKeyByCode,
|
||||
countriesKeyByCode,
|
||||
categoriesKeyById,
|
||||
subdivisions,
|
||||
countries,
|
||||
regions,
|
||||
cities
|
||||
}: DataProcessorData = processor.process(data)
|
||||
|
||||
logger.info('creating category table...')
|
||||
await new CategoriesTable({ categoriesKeyById }).make()
|
||||
logger.info('creating language table...')
|
||||
await new LanguagesTable({ languagesKeyByCode }).make()
|
||||
logger.info('creating countries table...')
|
||||
await new CountriesTable({
|
||||
countriesKeyByCode,
|
||||
subdivisionsKeyByCode,
|
||||
subdivisions,
|
||||
countries,
|
||||
cities
|
||||
}).make()
|
||||
logger.info('creating region table...')
|
||||
await new RegionsTable({ regions }).make()
|
||||
|
||||
logger.info('updating playlists.md...')
|
||||
const playlists = new Markdown({
|
||||
build: `${ROOT_DIR}/PLAYLISTS.md`,
|
||||
template: `${README_DIR}/template.md`
|
||||
})
|
||||
playlists.compile()
|
||||
}
|
||||
|
||||
main()
|
||||
import { CategoriesTable, CountriesTable, LanguagesTable, RegionsTable } from '../../tables'
|
||||
import { README_DIR, ROOT_DIR } from '../../constants'
|
||||
import { Logger } from '@freearhey/core'
|
||||
import { Markdown } from '../../core'
|
||||
import { loadData } from '../../api'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('creating category table...')
|
||||
await new CategoriesTable().create()
|
||||
logger.info('creating language table...')
|
||||
await new LanguagesTable().create()
|
||||
logger.info('creating countries table...')
|
||||
await new CountriesTable().create()
|
||||
logger.info('creating region table...')
|
||||
await new RegionsTable().create()
|
||||
|
||||
logger.info('updating playlists.md...')
|
||||
const playlists = new Markdown({
|
||||
build: `${ROOT_DIR}/PLAYLISTS.md`,
|
||||
template: `${README_DIR}/template.md`
|
||||
})
|
||||
playlists.compile()
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,178 +1,176 @@
|
||||
import { DataLoader, DataProcessor, IssueLoader, PlaylistParser } from '../../core'
|
||||
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { DATA_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { Issue, Stream } from '../../models'
|
||||
import { isURI } from '../../utils'
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
const issueLoader = new IssueLoader()
|
||||
let report = new Collection()
|
||||
|
||||
logger.info('loading issues...')
|
||||
const issues = await issueLoader.load()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const dataLoader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await dataLoader.load()
|
||||
const {
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
blocklistRecordsGroupedByChannelId
|
||||
}: DataProcessorData = processor.process(data)
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage,
|
||||
channelsKeyById,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByStreamId
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
const streamsGroupedByUrl = streams.groupBy((stream: Stream) => stream.url)
|
||||
const streamsGroupedByChannelId = streams.groupBy((stream: Stream) => stream.channelId)
|
||||
const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
|
||||
|
||||
logger.info('checking streams:remove requests...')
|
||||
const removeRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'streams:remove')
|
||||
)
|
||||
removeRequests.forEach((issue: Issue) => {
|
||||
const streamUrls = issue.data.getArray('streamUrl') || []
|
||||
|
||||
if (!streamUrls.length) {
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:remove',
|
||||
streamId: undefined,
|
||||
streamUrl: undefined,
|
||||
status: 'missing_link'
|
||||
}
|
||||
|
||||
report.add(result)
|
||||
} else {
|
||||
for (const streamUrl of streamUrls) {
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:remove',
|
||||
streamId: undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: 'pending'
|
||||
}
|
||||
|
||||
if (streamsGroupedByUrl.missing(streamUrl)) {
|
||||
result.status = 'wrong_link'
|
||||
}
|
||||
|
||||
report.add(result)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('checking streams:add requests...')
|
||||
const addRequests = issues.filter(issue => issue.labels.includes('streams:add'))
|
||||
const addRequestsBuffer = new Dictionary()
|
||||
addRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('streamId') || ''
|
||||
const streamUrl = issue.data.getString('streamUrl') || ''
|
||||
const [channelId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:add',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: 'pending'
|
||||
}
|
||||
|
||||
if (!channelId) result.status = 'missing_id'
|
||||
else if (!streamUrl) result.status = 'missing_link'
|
||||
else if (!isURI(streamUrl)) result.status = 'invalid_link'
|
||||
else if (blocklistRecordsGroupedByChannelId.has(channelId)) result.status = 'blocked'
|
||||
else if (channelsKeyById.missing(channelId)) result.status = 'wrong_id'
|
||||
else if (streamsGroupedByUrl.has(streamUrl)) result.status = 'on_playlist'
|
||||
else if (addRequestsBuffer.has(streamUrl)) result.status = 'duplicate'
|
||||
else result.status = 'pending'
|
||||
|
||||
addRequestsBuffer.set(streamUrl, true)
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
logger.info('checking streams:edit requests...')
|
||||
const editRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'streams:edit')
|
||||
)
|
||||
editRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('streamId') || ''
|
||||
const streamUrl = issue.data.getString('streamUrl') || ''
|
||||
const [channelId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:edit',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: 'pending'
|
||||
}
|
||||
|
||||
if (!streamUrl) result.status = 'missing_link'
|
||||
else if (streamsGroupedByUrl.missing(streamUrl)) result.status = 'invalid_link'
|
||||
else if (channelId && channelsKeyById.missing(channelId)) result.status = 'invalid_id'
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
logger.info('checking channel search requests...')
|
||||
const channelSearchRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'channel search')
|
||||
)
|
||||
const channelSearchRequestsBuffer = new Dictionary()
|
||||
channelSearchRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('channelId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'channel search',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: undefined,
|
||||
status: 'pending'
|
||||
}
|
||||
|
||||
if (!channelId) result.status = 'missing_id'
|
||||
else if (channelsKeyById.missing(channelId)) result.status = 'invalid_id'
|
||||
else if (channelSearchRequestsBuffer.has(streamId)) result.status = 'duplicate'
|
||||
else if (blocklistRecordsGroupedByChannelId.has(channelId)) result.status = 'blocked'
|
||||
else if (streamsGroupedById.has(streamId)) result.status = 'fulfilled'
|
||||
else if (!feedId && streamsGroupedByChannelId.has(channelId)) result.status = 'fulfilled'
|
||||
else {
|
||||
const channelData = channelsKeyById.get(channelId)
|
||||
if (channelData && channelData.isClosed) result.status = 'closed'
|
||||
}
|
||||
|
||||
channelSearchRequestsBuffer.set(streamId, true)
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
report = report.orderBy(item => item.issueNumber).filter(item => item.status !== 'pending')
|
||||
|
||||
console.table(report.all())
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
function truncate(string: string, limit: number = 100) {
|
||||
if (!string) return string
|
||||
if (string.length < limit) return string
|
||||
|
||||
return string.slice(0, limit) + '...'
|
||||
}
|
||||
import { Logger, Collection, Dictionary } from '@freearhey/core'
|
||||
import { IssueLoader, PlaylistParser } from '../../core'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { isURI, truncate } from '../../utils'
|
||||
import { STREAMS_DIR } from '../../constants'
|
||||
import { Issue, Stream } from '../../models'
|
||||
import { data, loadData } from '../../api'
|
||||
|
||||
const status = {
|
||||
PENDING: 'pending',
|
||||
FULFILLED: 'fulfilled',
|
||||
MISSING_CHANNEL_ID: 'missing_channel_id',
|
||||
INVALID_CHANNEL_ID: 'invalid_channel_id',
|
||||
MISSING_STREAM_URL: 'missing_stream_url',
|
||||
INVALID_STREAM_URL: 'invalid_stream_url',
|
||||
NONEXISTENT_LINK: 'nonexistent_link',
|
||||
CHANNEL_BLOCKED: 'channel_blocked',
|
||||
CHANNEL_CLOSED: 'channel_closed',
|
||||
DUPLICATE_LINK: 'duplicate_link',
|
||||
DUPLICATE_REQUEST: 'duplicate_request'
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const logger = new Logger()
|
||||
const issueLoader = new IssueLoader()
|
||||
let report = new Collection()
|
||||
|
||||
logger.info('loading issues...')
|
||||
const issues = await issueLoader.load()
|
||||
|
||||
logger.info('loading data from api...')
|
||||
await loadData()
|
||||
|
||||
logger.info('loading streams...')
|
||||
const streamsStorage = new Storage(STREAMS_DIR)
|
||||
const parser = new PlaylistParser({
|
||||
storage: streamsStorage
|
||||
})
|
||||
const files = await streamsStorage.list('**/*.m3u')
|
||||
const streams = await parser.parse(files)
|
||||
const streamsGroupedByUrl = streams.groupBy((stream: Stream) => stream.url)
|
||||
const streamsGroupedByChannel = streams.groupBy((stream: Stream) => stream.channel)
|
||||
const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
|
||||
|
||||
logger.info('checking streams:remove requests...')
|
||||
const removeRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'streams:remove')
|
||||
)
|
||||
removeRequests.forEach((issue: Issue) => {
|
||||
const streamUrls = issue.data.getArray('streamUrl') || []
|
||||
|
||||
if (!streamUrls.length) {
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:remove',
|
||||
streamId: undefined,
|
||||
streamUrl: undefined,
|
||||
status: status.NONEXISTENT_LINK
|
||||
}
|
||||
|
||||
report.add(result)
|
||||
} else {
|
||||
for (const streamUrl of streamUrls) {
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:remove',
|
||||
streamId: undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: status.PENDING
|
||||
}
|
||||
|
||||
if (streamsGroupedByUrl.missing(streamUrl)) {
|
||||
result.status = status.NONEXISTENT_LINK
|
||||
}
|
||||
|
||||
report.add(result)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('checking streams:add requests...')
|
||||
const addRequests = issues.filter(issue => issue.labels.includes('streams:add'))
|
||||
const addRequestsBuffer = new Dictionary()
|
||||
addRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('streamId') || ''
|
||||
const streamUrl = issue.data.getString('streamUrl') || ''
|
||||
const [channelId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:add',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: status.PENDING
|
||||
}
|
||||
|
||||
if (!channelId) result.status = status.MISSING_CHANNEL_ID
|
||||
else if (!streamUrl) result.status = status.MISSING_STREAM_URL
|
||||
else if (!isURI(streamUrl)) result.status = status.INVALID_STREAM_URL
|
||||
else if (data.blocklistRecordsGroupedByChannel.has(channelId))
|
||||
result.status = status.CHANNEL_BLOCKED
|
||||
else if (data.channelsKeyById.missing(channelId)) result.status = status.INVALID_CHANNEL_ID
|
||||
else if (streamsGroupedByUrl.has(streamUrl)) result.status = status.DUPLICATE_LINK
|
||||
else if (addRequestsBuffer.has(streamUrl)) result.status = status.DUPLICATE_REQUEST
|
||||
else result.status = status.PENDING
|
||||
|
||||
addRequestsBuffer.set(streamUrl, true)
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
logger.info('checking streams:edit requests...')
|
||||
const editRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'streams:edit')
|
||||
)
|
||||
editRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('streamId') || ''
|
||||
const streamUrl = issue.data.getString('streamUrl') || ''
|
||||
const [channelId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'streams:edit',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: truncate(streamUrl),
|
||||
status: status.PENDING
|
||||
}
|
||||
|
||||
if (!streamUrl) result.status = status.MISSING_STREAM_URL
|
||||
else if (streamsGroupedByUrl.missing(streamUrl)) result.status = status.NONEXISTENT_LINK
|
||||
else if (channelId && data.channelsKeyById.missing(channelId))
|
||||
result.status = status.INVALID_CHANNEL_ID
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
logger.info('checking channel search requests...')
|
||||
const channelSearchRequests = issues.filter(issue =>
|
||||
issue.labels.find((label: string) => label === 'channel search')
|
||||
)
|
||||
const channelSearchRequestsBuffer = new Dictionary()
|
||||
channelSearchRequests.forEach((issue: Issue) => {
|
||||
const streamId = issue.data.getString('streamId') || issue.data.getString('channelId') || ''
|
||||
const [channelId, feedId] = streamId.split('@')
|
||||
|
||||
const result = {
|
||||
issueNumber: issue.number,
|
||||
type: 'channel search',
|
||||
streamId: streamId || undefined,
|
||||
streamUrl: undefined,
|
||||
status: status.PENDING
|
||||
}
|
||||
|
||||
if (!channelId) result.status = status.MISSING_CHANNEL_ID
|
||||
else if (data.channelsKeyById.missing(channelId)) result.status = status.INVALID_CHANNEL_ID
|
||||
else if (channelSearchRequestsBuffer.has(streamId)) result.status = status.DUPLICATE_REQUEST
|
||||
else if (data.blocklistRecordsGroupedByChannel.has(channelId))
|
||||
result.status = status.CHANNEL_BLOCKED
|
||||
else if (streamsGroupedById.has(streamId)) result.status = status.FULFILLED
|
||||
else if (!feedId && streamsGroupedByChannel.has(channelId)) result.status = status.FULFILLED
|
||||
else {
|
||||
const channelData = data.channelsKeyById.get(channelId)
|
||||
if (channelData && channelData.isClosed()) result.status = status.CHANNEL_CLOSED
|
||||
}
|
||||
|
||||
channelSearchRequestsBuffer.set(streamId, true)
|
||||
|
||||
report.add(result)
|
||||
})
|
||||
|
||||
report = report.sortBy(item => item.issueNumber).filter(item => item.status !== status.PENDING)
|
||||
|
||||
console.table(report.all())
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,11 +1,11 @@
export const ROOT_DIR = process.env.ROOT_DIR || './'
export const STREAMS_DIR = process.env.STREAMS_DIR || './streams'
export const PUBLIC_DIR = process.env.PUBLIC_DIR || './.gh-pages'
export const README_DIR = process.env.README_DIR || './.readme'
export const API_DIR = process.env.API_DIR || './.api'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const LOGS_DIR = process.env.LOGS_DIR || './temp/logs'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org'
export const REPO = 'iptv'
export const EOL = '\r\n'
export const ROOT_DIR = process.env.ROOT_DIR || './'
export const STREAMS_DIR = process.env.STREAMS_DIR || './streams'
export const PUBLIC_DIR = process.env.PUBLIC_DIR || './.gh-pages'
export const README_DIR = process.env.README_DIR || './.readme'
export const API_DIR = process.env.API_DIR || './.api'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const LOGS_DIR = process.env.LOGS_DIR || './temp/logs'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org'
export const REPO = 'iptv'
export const EOL = '\r\n'

@@ -1,16 +0,0 @@
import axios, { AxiosInstance, AxiosResponse, AxiosRequestConfig } from 'axios'

export class ApiClient {
  instance: AxiosInstance

  constructor() {
    this.instance = axios.create({
      baseURL: 'https://iptv-org.github.io/api',
      responseType: 'stream'
    })
  }

  get(url: string, options: AxiosRequestConfig): Promise<AxiosResponse> {
    return this.instance.get(url, options)
  }
}
@@ -1,22 +1,22 @@
import { Table } from 'console-table-printer'
import { ComplexOptions } from 'console-table-printer/dist/src/models/external-table'

export class CliTable {
  table: Table

  constructor(options?: ComplexOptions | string[]) {
    this.table = new Table(options)
  }

  append(row) {
    this.table.addRow(row)
  }

  render() {
    this.table.printTable()
  }

  toString() {
    return this.table.render()
  }
}
import { ComplexOptions } from 'console-table-printer/dist/src/models/external-table'
import { Table } from 'console-table-printer'

export class CliTable {
  table: Table

  constructor(options?: ComplexOptions | string[]) {
    this.table = new Table(options)
  }

  append(row) {
    this.table.addRow(row)
  }

  render() {
    this.table.printTable()
  }

  toString() {
    return this.table.render()
  }
}

@@ -1,113 +0,0 @@
|
||||
import { ApiClient } from './apiClient'
|
||||
import { Storage } from '@freearhey/core'
|
||||
import cliProgress, { MultiBar } from 'cli-progress'
|
||||
import type { DataLoaderProps, DataLoaderData } from '../types/dataLoader'
|
||||
|
||||
const formatBytes = (bytes: number) => {
|
||||
if (bytes === 0) return '0 B'
|
||||
const k = 1024
|
||||
const sizes = ['B', 'KB', 'MB', 'GB']
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]
|
||||
}
|
||||
|
||||
export class DataLoader {
|
||||
client: ApiClient
|
||||
storage: Storage
|
||||
progressBar: MultiBar
|
||||
|
||||
constructor(props: DataLoaderProps) {
|
||||
this.client = new ApiClient()
|
||||
this.storage = props.storage
|
||||
this.progressBar = new cliProgress.MultiBar({
|
||||
stopOnComplete: true,
|
||||
hideCursor: true,
|
||||
forceRedraw: true,
|
||||
barsize: 36,
|
||||
format(options, params, payload) {
|
||||
const filename = payload.filename.padEnd(18, ' ')
|
||||
const barsize = options.barsize || 40
|
||||
const percent = (params.progress * 100).toFixed(2)
|
||||
const speed = payload.speed ? formatBytes(payload.speed) + '/s' : 'N/A'
|
||||
const total = formatBytes(params.total)
|
||||
const completeSize = Math.round(params.progress * barsize)
|
||||
const incompleteSize = barsize - completeSize
|
||||
const bar =
|
||||
options.barCompleteString && options.barIncompleteString
|
||||
? options.barCompleteString.substr(0, completeSize) +
|
||||
options.barGlue +
|
||||
options.barIncompleteString.substr(0, incompleteSize)
|
||||
: '-'.repeat(barsize)
|
||||
|
||||
return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async load(): Promise<DataLoaderData> {
|
||||
const [
|
||||
countries,
|
||||
regions,
|
||||
subdivisions,
|
||||
languages,
|
||||
categories,
|
||||
blocklist,
|
||||
channels,
|
||||
feeds,
|
||||
logos,
|
||||
timezones,
|
||||
guides,
|
||||
streams,
|
||||
cities
|
||||
] = await Promise.all([
|
||||
this.storage.json('countries.json'),
|
||||
this.storage.json('regions.json'),
|
||||
this.storage.json('subdivisions.json'),
|
||||
this.storage.json('languages.json'),
|
||||
this.storage.json('categories.json'),
|
||||
this.storage.json('blocklist.json'),
|
||||
this.storage.json('channels.json'),
|
||||
this.storage.json('feeds.json'),
|
||||
this.storage.json('logos.json'),
|
||||
this.storage.json('timezones.json'),
|
||||
this.storage.json('guides.json'),
|
||||
this.storage.json('streams.json'),
|
||||
this.storage.json('cities.json')
|
||||
])
|
||||
|
||||
return {
|
||||
countries,
|
||||
regions,
|
||||
subdivisions,
|
||||
languages,
|
||||
categories,
|
||||
blocklist,
|
||||
channels,
|
||||
feeds,
|
||||
logos,
|
||||
timezones,
|
||||
guides,
|
||||
streams,
|
||||
cities
|
||||
}
|
||||
}
|
||||
|
||||
async download(filename: string) {
|
||||
if (!this.storage || !this.progressBar) return
|
||||
|
||||
const stream = await this.storage.createStream(filename)
|
||||
const progressBar = this.progressBar.create(0, 0, { filename })
|
||||
|
||||
this.client
|
||||
.get(filename, {
|
||||
responseType: 'stream',
|
||||
onDownloadProgress({ total, loaded, rate }) {
|
||||
if (total) progressBar.setTotal(total)
|
||||
progressBar.update(loaded, { speed: rate })
|
||||
}
|
||||
})
|
||||
.then(response => {
|
||||
response.data.pipe(stream)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,165 +0,0 @@
|
||||
import { DataProcessorData } from '../types/dataProcessor'
|
||||
import { DataLoaderData } from '../types/dataLoader'
|
||||
import { Collection } from '@freearhey/core'
|
||||
import {
|
||||
BlocklistRecord,
|
||||
Subdivision,
|
||||
Category,
|
||||
Language,
|
||||
Timezone,
|
||||
Channel,
|
||||
Country,
|
||||
Region,
|
||||
Stream,
|
||||
Guide,
|
||||
City,
|
||||
Feed,
|
||||
Logo
|
||||
} from '../models'
|
||||
|
||||
export class DataProcessor {
|
||||
process(data: DataLoaderData): DataProcessorData {
|
||||
let regions = new Collection(data.regions).map(data => new Region(data))
|
||||
let regionsKeyByCode = regions.keyBy((region: Region) => region.code)
|
||||
|
||||
const categories = new Collection(data.categories).map(data => new Category(data))
|
||||
const categoriesKeyById = categories.keyBy((category: Category) => category.id)
|
||||
|
||||
const languages = new Collection(data.languages).map(data => new Language(data))
|
||||
const languagesKeyByCode = languages.keyBy((language: Language) => language.code)
|
||||
|
||||
let subdivisions = new Collection(data.subdivisions).map(data => new Subdivision(data))
|
||||
let subdivisionsKeyByCode = subdivisions.keyBy((subdivision: Subdivision) => subdivision.code)
|
||||
let subdivisionsGroupedByCountryCode = subdivisions.groupBy(
|
||||
(subdivision: Subdivision) => subdivision.countryCode
|
||||
)
|
||||
|
||||
let countries = new Collection(data.countries).map(data => new Country(data))
|
||||
let countriesKeyByCode = countries.keyBy((country: Country) => country.code)
|
||||
|
||||
const cities = new Collection(data.cities).map(data =>
|
||||
new City(data)
|
||||
.withRegions(regions)
|
||||
.withCountry(countriesKeyByCode)
|
||||
.withSubdivision(subdivisionsKeyByCode)
|
||||
)
|
||||
const citiesKeyByCode = cities.keyBy((city: City) => city.code)
|
||||
const citiesGroupedByCountryCode = cities.groupBy((city: City) => city.countryCode)
|
||||
const citiesGroupedBySubdivisionCode = cities.groupBy((city: City) => city.subdivisionCode)
|
||||
|
||||
const timezones = new Collection(data.timezones).map(data =>
|
||||
new Timezone(data).withCountries(countriesKeyByCode)
|
||||
)
|
||||
const timezonesKeyById = timezones.keyBy((timezone: Timezone) => timezone.id)
|
||||
|
||||
const blocklistRecords = new Collection(data.blocklist).map(data => new BlocklistRecord(data))
|
||||
const blocklistRecordsGroupedByChannelId = blocklistRecords.groupBy(
|
||||
(blocklistRecord: BlocklistRecord) => blocklistRecord.channelId
|
||||
)
|
||||
|
||||
let channels = new Collection(data.channels).map(data => new Channel(data))
|
||||
let channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
|
||||
|
||||
let feeds = new Collection(data.feeds).map(data => new Feed(data))
|
||||
let feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
|
||||
let feedsGroupedById = feeds.groupBy((feed: Feed) => feed.id)
|
||||
|
||||
const logos = new Collection(data.logos).map(data => new Logo(data).withFeed(feedsGroupedById))
|
||||
const logosGroupedByChannelId = logos.groupBy((logo: Logo) => logo.channelId)
|
||||
const logosGroupedByStreamId = logos.groupBy((logo: Logo) => logo.getStreamId())
|
||||
|
||||
const streams = new Collection(data.streams).map(data =>
|
||||
new Stream(data).withLogos(logosGroupedByStreamId)
|
||||
)
|
||||
const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
|
||||
|
||||
const guides = new Collection(data.guides).map(data => new Guide(data))
|
||||
const guidesGroupedByStreamId = guides.groupBy((guide: Guide) => guide.getStreamId())
|
||||
|
||||
regions = regions.map((region: Region) =>
|
||||
region
|
||||
.withCountries(countriesKeyByCode)
|
||||
.withRegions(regions)
|
||||
.withSubdivisions(subdivisions)
|
||||
.withCities(cities)
|
||||
)
|
||||
regionsKeyByCode = regions.keyBy((region: Region) => region.code)
|
||||
|
||||
countries = countries.map((country: Country) =>
|
||||
country
|
||||
.withCities(citiesGroupedByCountryCode)
|
||||
.withSubdivisions(subdivisionsGroupedByCountryCode)
|
||||
.withRegions(regions)
|
||||
.withLanguage(languagesKeyByCode)
|
||||
)
|
||||
countriesKeyByCode = countries.keyBy((country: Country) => country.code)
|
||||
|
||||
subdivisions = subdivisions.map((subdivision: Subdivision) =>
|
||||
subdivision
|
||||
.withCities(citiesGroupedBySubdivisionCode)
|
||||
.withCountry(countriesKeyByCode)
|
||||
.withRegions(regions)
|
||||
.withParent(subdivisionsKeyByCode)
|
||||
)
|
||||
subdivisionsKeyByCode = subdivisions.keyBy((subdivision: Subdivision) => subdivision.code)
|
||||
subdivisionsGroupedByCountryCode = subdivisions.groupBy(
|
||||
(subdivision: Subdivision) => subdivision.countryCode
|
||||
)
|
||||
|
||||
channels = channels.map((channel: Channel) =>
|
||||
channel
|
||||
.withFeeds(feedsGroupedByChannelId)
|
||||
.withLogos(logosGroupedByChannelId)
|
||||
.withCategories(categoriesKeyById)
|
||||
.withCountry(countriesKeyByCode)
|
||||
.withSubdivision(subdivisionsKeyByCode)
|
||||
.withCategories(categoriesKeyById)
|
||||
)
|
||||
channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
|
||||
|
||||
feeds = feeds.map((feed: Feed) =>
|
||||
feed
|
||||
.withChannel(channelsKeyById)
|
||||
.withLanguages(languagesKeyByCode)
|
||||
.withTimezones(timezonesKeyById)
|
||||
.withBroadcastArea(
|
||||
citiesKeyByCode,
|
||||
subdivisionsKeyByCode,
|
||||
countriesKeyByCode,
|
||||
regionsKeyByCode
|
||||
)
|
||||
)
|
||||
feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
|
||||
feedsGroupedById = feeds.groupBy((feed: Feed) => feed.id)
|
||||
|
||||
return {
|
||||
blocklistRecordsGroupedByChannelId,
|
||||
subdivisionsGroupedByCountryCode,
|
||||
feedsGroupedByChannelId,
|
||||
guidesGroupedByStreamId,
|
||||
logosGroupedByStreamId,
|
||||
subdivisionsKeyByCode,
|
||||
countriesKeyByCode,
|
||||
languagesKeyByCode,
|
||||
streamsGroupedById,
|
||||
categoriesKeyById,
|
||||
timezonesKeyById,
|
||||
regionsKeyByCode,
|
||||
blocklistRecords,
|
||||
channelsKeyById,
|
||||
citiesKeyByCode,
|
||||
subdivisions,
|
||||
categories,
|
||||
countries,
|
||||
languages,
|
||||
timezones,
|
||||
channels,
|
||||
regions,
|
||||
streams,
|
||||
cities,
|
||||
guides,
|
||||
feeds,
|
||||
logos
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,46 +1,50 @@
|
||||
type Column = {
|
||||
name: string
|
||||
nowrap?: boolean
|
||||
align?: string
|
||||
}
|
||||
|
||||
type DataItem = string[]
|
||||
|
||||
export class HTMLTable {
|
||||
data: DataItem[]
|
||||
columns: Column[]
|
||||
|
||||
constructor(data: DataItem[], columns: Column[]) {
|
||||
this.data = data
|
||||
this.columns = columns
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '<table>\r\n'
|
||||
|
||||
output += ' <thead>\r\n <tr>'
|
||||
for (const column of this.columns) {
|
||||
output += `<th align="left">${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\r\n </thead>\r\n'
|
||||
|
||||
output += ' <tbody>\r\n'
|
||||
for (const item of this.data) {
|
||||
output += ' <tr>'
|
||||
let i = 0
|
||||
for (const prop in item) {
|
||||
const column = this.columns[i]
|
||||
const nowrap = column.nowrap ? ' nowrap' : ''
|
||||
const align = column.align ? ` align="${column.align}"` : ''
|
||||
output += `<td${align}${nowrap}>${item[prop]}</td>`
|
||||
i++
|
||||
}
|
||||
output += '</tr>\r\n'
|
||||
}
|
||||
output += ' </tbody>\r\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export type HTMLTableColumn = {
|
||||
name: string
|
||||
nowrap?: boolean
|
||||
align?: string
|
||||
}
|
||||
|
||||
export type HTMLTableItem = string[]
|
||||
|
||||
export class HTMLTable {
|
||||
data: Collection<HTMLTableItem>
|
||||
columns: Collection<HTMLTableColumn>
|
||||
|
||||
constructor(data: Collection<HTMLTableItem>, columns: Collection<HTMLTableColumn>) {
|
||||
this.data = data
|
||||
this.columns = columns
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '<table>\r\n'
|
||||
|
||||
output += ' <thead>\r\n <tr>'
|
||||
this.columns.forEach((column: HTMLTableColumn) => {
|
||||
output += `<th align="left">${column.name}</th>`
|
||||
})
|
||||
|
||||
output += '</tr>\r\n </thead>\r\n'
|
||||
|
||||
output += ' <tbody>\r\n'
|
||||
this.data.forEach((item: HTMLTableItem) => {
|
||||
output += ' <tr>'
|
||||
let i = 0
|
||||
for (const prop in item) {
|
||||
const column = this.columns.all()[i]
|
||||
const nowrap = column.nowrap ? ' nowrap' : ''
|
||||
const align = column.align ? ` align="${column.align}"` : ''
|
||||
output += `<td${align}${nowrap}>${item[prop]}</td>`
|
||||
i++
|
||||
}
|
||||
output += '</tr>\r\n'
|
||||
})
|
||||
|
||||
output += ' </tbody>\r\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,11 @@
export * from './apiClient'
export * from './cliTable'
export * from './dataProcessor'
export * from './dataLoader'
export * from './htmlTable'
export * from './issueData'
export * from './issueLoader'
export * from './issueParser'
export * from './logParser'
export * from './markdown'
export * from './numberParser'
export * from './playlistParser'
export * from './proxyParser'
export * from './streamTester'
export * from './cliTable'
export * from './htmlTable'
export * from './issueData'
export * from './issueLoader'
export * from './issueParser'
export * from './logParser'
export * from './markdown'
export * from './numberParser'
export * from './playlistParser'
export * from './proxyParser'
export * from './streamTester'

@@ -1,34 +1,36 @@
import { Dictionary } from '@freearhey/core'

export class IssueData {
  _data: Dictionary
  constructor(data: Dictionary) {
    this._data = data
  }

  has(key: string): boolean {
    return this._data.has(key)
  }

  missing(key: string): boolean {
    return this._data.missing(key) || this._data.get(key) === undefined
  }

  getBoolean(key: string): boolean {
    return Boolean(this._data.get(key))
  }

  getString(key: string): string | undefined {
    const deleteSymbol = '~'

    return this._data.get(key) === deleteSymbol ? '' : this._data.get(key)
  }

  getArray(key: string): string[] | undefined {
    const deleteSymbol = '~'

    if (this._data.missing(key)) return undefined

    return this._data.get(key) === deleteSymbol ? [] : this._data.get(key).split('\r\n')
  }
}
import { Dictionary } from '@freearhey/core'

export class IssueData {
  _data: Dictionary<string>
  constructor(data: Dictionary<string>) {
    this._data = data
  }

  has(key: string): boolean {
    return this._data.has(key)
  }

  missing(key: string): boolean {
    return this._data.missing(key) || this._data.get(key) === undefined
  }

  getBoolean(key: string): boolean {
    return Boolean(this._data.get(key))
  }

  getString(key: string): string | undefined {
    const deleteSymbol = '~'

    return this._data.get(key) === deleteSymbol ? '' : this._data.get(key)
  }

  getArray(key: string): string[] | undefined {
    const deleteSymbol = '~'

    if (this._data.missing(key)) return undefined

    const value = this._data.get(key)

    return !value || value === deleteSymbol ? [] : value.split('\r\n')
  }
}

@@ -1,37 +1,37 @@
import { Collection } from '@freearhey/core'
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'
import { TESTING, OWNER, REPO } from '../constants'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load(props?: { labels: string | string[] }) {
    let labels = ''
    if (props && props.labels) {
      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
    }
    let issues: object[] = []
    if (TESTING) {
      issues = (await import('../../tests/__data__/input/issues.js')).default
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        status: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO } from '../constants'
import { Collection } from '@freearhey/core'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load(props?: { labels: string | string[] }) {
    let labels = ''
    if (props && props.labels) {
      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
    }
    let issues: object[] = []
    if (TESTING) {
      issues = (await import('../../tests/__data__/input/issues.js')).default
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        status: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}

@@ -1,48 +1,48 @@
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { Issue } from '../models'
|
||||
import { IssueData } from './issueData'
|
||||
|
||||
const FIELDS = new Dictionary({
|
||||
'Stream ID': 'streamId',
|
||||
'Channel ID': 'channelId',
|
||||
'Feed ID': 'feedId',
|
||||
'Stream URL': 'streamUrl',
|
||||
'New Stream URL': 'newStreamUrl',
|
||||
Label: 'label',
|
||||
Quality: 'quality',
|
||||
'HTTP User-Agent': 'httpUserAgent',
|
||||
'HTTP User Agent': 'httpUserAgent',
|
||||
'HTTP Referrer': 'httpReferrer',
|
||||
'What happened to the stream?': 'reason',
|
||||
Reason: 'reason',
|
||||
Notes: 'notes',
|
||||
Directives: 'directives'
|
||||
})
|
||||
|
||||
export class IssueParser {
|
||||
parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
|
||||
const fields = typeof issue.body === 'string' ? issue.body.split('###') : []
|
||||
|
||||
const data = new Dictionary()
|
||||
fields.forEach((field: string) => {
|
||||
const parsed = typeof field === 'string' ? field.split(/\r?\n/).filter(Boolean) : []
|
||||
let _label = parsed.shift()
|
||||
_label = _label ? _label.replace(/ \(optional\)| \(required\)/, '').trim() : ''
|
||||
let _value = parsed.join('\r\n')
|
||||
_value = _value ? _value.trim() : ''
|
||||
|
||||
if (!_label || !_value) return data
|
||||
|
||||
const id: string = FIELDS.get(_label)
|
||||
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
|
||||
|
||||
if (!id) return
|
||||
|
||||
data.set(id, value)
|
||||
})
|
||||
|
||||
const labels = issue.labels.map(label => label.name)
|
||||
|
||||
return new Issue({ number: issue.number, labels, data: new IssueData(data) })
|
||||
}
|
||||
}
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { IssueData } from './issueData'
|
||||
import { Issue } from '../models'
|
||||
|
||||
const FIELDS = new Dictionary({
|
||||
'Stream ID': 'streamId',
|
||||
'Channel ID': 'channelId',
|
||||
'Feed ID': 'feedId',
|
||||
'Stream URL': 'streamUrl',
|
||||
'New Stream URL': 'newStreamUrl',
|
||||
Label: 'label',
|
||||
Quality: 'quality',
|
||||
'HTTP User-Agent': 'httpUserAgent',
|
||||
'HTTP User Agent': 'httpUserAgent',
|
||||
'HTTP Referrer': 'httpReferrer',
|
||||
'What happened to the stream?': 'reason',
|
||||
Reason: 'reason',
|
||||
Notes: 'notes',
|
||||
Directives: 'directives'
|
||||
})
|
||||
|
||||
export class IssueParser {
|
||||
parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
|
||||
const fields = typeof issue.body === 'string' ? issue.body.split('###') : []
|
||||
|
||||
const data = new Dictionary<string>()
|
||||
fields.forEach((field: string) => {
|
||||
const parsed = typeof field === 'string' ? field.split(/\r?\n/).filter(Boolean) : []
|
||||
let _label = parsed.shift()
|
||||
_label = _label ? _label.replace(/ \(optional\)| \(required\)/, '').trim() : ''
|
||||
let _value = parsed.join('\r\n')
|
||||
_value = _value ? _value.trim() : ''
|
||||
|
||||
if (!_label || !_value) return data
|
||||
|
||||
const id = FIELDS.get(_label)
|
||||
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
|
||||
|
||||
if (!id) return
|
||||
|
||||
data.set(id, value)
|
||||
})
|
||||
|
||||
const labels = issue.labels.map(label => label.name)
|
||||
|
||||
return new Issue({ number: issue.number, labels, data: new IssueData(data) })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
export type LogItem = {
  type: string
  filepath: string
  count: number
}

export class LogParser {
  parse(content: string): LogItem[] {
    if (!content) return []
    const lines = content.split('\n')

    return lines.map(line => (line ? JSON.parse(line) : null)).filter(l => l)
  }
}
@@ -1,45 +1,45 @@
import fs from 'fs'
import path from 'path'

type MarkdownConfig = {
  build: string
  template: string
}

export class Markdown {
  build: string
  template: string

  constructor(config: MarkdownConfig) {
    this.build = config.build
    this.template = config.template
  }

  compile() {
    const workingDir = process.cwd()

    const templatePath = path.resolve(workingDir, this.template)
    const template = fs.readFileSync(templatePath, 'utf8')
    const processedContent = this.processIncludes(template, workingDir)

    if (this.build) {
      const outputPath = path.resolve(workingDir, this.build)
      fs.writeFileSync(outputPath, processedContent, 'utf8')
    }
  }

  private processIncludes(template: string, baseDir: string): string {
    const includeRegex = /#include\s+"([^"]+)"/g

    return template.replace(includeRegex, (match, includePath) => {
      try {
        const fullPath = path.resolve(baseDir, includePath)
        const includeContent = fs.readFileSync(fullPath, 'utf8')
        return this.processIncludes(includeContent, baseDir)
      } catch (error) {
        console.warn(`Warning: Could not include file ${includePath}: ${error}`)
        return match
      }
    })
  }
}

import path from 'path'
import fs from 'fs'

type MarkdownConfig = {
  build: string
  template: string
}

export class Markdown {
  build: string
  template: string

  constructor(config: MarkdownConfig) {
    this.build = config.build
    this.template = config.template
  }

  compile() {
    const workingDir = process.cwd()

    const templatePath = path.resolve(workingDir, this.template)
    const template = fs.readFileSync(templatePath, 'utf8')
    const processedContent = this.processIncludes(template, workingDir)

    if (this.build) {
      const outputPath = path.resolve(workingDir, this.build)
      fs.writeFileSync(outputPath, processedContent, 'utf8')
    }
  }

  private processIncludes(template: string, baseDir: string): string {
    const includeRegex = /#include\s+"([^"]+)"/g

    return template.replace(includeRegex, (match, includePath) => {
      try {
        const fullPath = path.resolve(baseDir, includePath)
        const includeContent = fs.readFileSync(fullPath, 'utf8')
        return this.processIncludes(includeContent, baseDir)
      } catch (error) {
        console.warn(`Warning: Could not include file ${includePath}: ${error}`)
        return match
      }
    })
  }
}
@@ -1,10 +1,10 @@
export default class NumberParser {
  async parse(number: string) {
    const parsed = parseInt(number)
    if (isNaN(parsed)) {
      throw new Error('numberParser:parse() Input value is not a number')
    }

    return parsed
  }
}
@@ -1,60 +1,43 @@
import { Collection, Storage, Dictionary } from '@freearhey/core'
import parser from 'iptv-playlist-parser'
import { Stream } from '../models'

type PlaylistPareserProps = {
  storage: Storage
  feedsGroupedByChannelId: Dictionary
  logosGroupedByStreamId: Dictionary
  channelsKeyById: Dictionary
}

export class PlaylistParser {
  storage: Storage
  feedsGroupedByChannelId: Dictionary
  logosGroupedByStreamId: Dictionary
  channelsKeyById: Dictionary

  constructor({
    storage,
    feedsGroupedByChannelId,
    logosGroupedByStreamId,
    channelsKeyById
  }: PlaylistPareserProps) {
    this.storage = storage
    this.feedsGroupedByChannelId = feedsGroupedByChannelId
    this.logosGroupedByStreamId = logosGroupedByStreamId
    this.channelsKeyById = channelsKeyById
  }

  async parse(files: string[]): Promise<Collection> {
    let streams = new Collection()

    for (const filepath of files) {
      if (!this.storage.existsSync(filepath)) continue

      const _streams: Collection = await this.parseFile(filepath)
      streams = streams.concat(_streams)
    }

    return streams
  }

  async parseFile(filepath: string): Promise<Collection> {
    const content = await this.storage.load(filepath)
    const parsed: parser.Playlist = parser.parse(content)

    const streams = new Collection(parsed.items).map((data: parser.PlaylistItem) => {
      const stream = new Stream()
        .fromPlaylistItem(data)
        .withFeed(this.feedsGroupedByChannelId)
        .withChannel(this.channelsKeyById)
        .withLogos(this.logosGroupedByStreamId)
        .setFilepath(filepath)

      return stream
    })

    return streams
  }
}
import { Storage } from '@freearhey/storage-js'
import { Collection } from '@freearhey/core'
import parser from 'iptv-playlist-parser'
import { Stream } from '../models'

type PlaylistPareserProps = {
  storage: Storage
}

export class PlaylistParser {
  storage: Storage

  constructor({ storage }: PlaylistPareserProps) {
    this.storage = storage
  }

  async parse(files: string[]): Promise<Collection<Stream>> {
    const parsed = new Collection<Stream>()

    for (const filepath of files) {
      if (!this.storage.existsSync(filepath)) continue
      const _parsed: Collection<Stream> = await this.parseFile(filepath)
      parsed.concat(_parsed)
    }

    return parsed
  }

  async parseFile(filepath: string): Promise<Collection<Stream>> {
    const content = await this.storage.load(filepath)
    const parsed: parser.Playlist = parser.parse(content)

    const streams = new Collection<Stream>()
    parsed.items.forEach((data: parser.PlaylistItem) => {
      const stream = Stream.fromPlaylistItem(data)
      stream.filepath = filepath

      streams.add(stream)
    })

    return streams
  }
}
@@ -1,31 +1,31 @@
import { URL } from 'node:url'

interface ProxyParserResult {
  protocol: string | null
  auth?: {
    username?: string
    password?: string
  }
  host: string
  port: number | null
}

export class ProxyParser {
  parse(_url: string): ProxyParserResult {
    const parsed = new URL(_url)

    const result: ProxyParserResult = {
      protocol: parsed.protocol.replace(':', '') || null,
      host: parsed.hostname,
      port: parsed.port ? parseInt(parsed.port) : null
    }

    if (parsed.username || parsed.password) {
      result.auth = {}
      if (parsed.username) result.auth.username = parsed.username
      if (parsed.password) result.auth.password = parsed.password
    }

    return result
  }
}
@@ -1,117 +1,125 @@
import { Stream } from '../models'
import { TESTING } from '../constants'
import mediaInfoFactory from 'mediainfo.js'
import axios, { AxiosInstance, AxiosProxyConfig, AxiosRequestConfig } from 'axios'
import { ProxyParser } from './proxyParser.js'
import { OptionValues } from 'commander'
import { SocksProxyAgent } from 'socks-proxy-agent'

export type TestResult = {
  status: {
    ok: boolean
    code: string
  }
}

export type StreamTesterProps = {
  options: OptionValues
}

export class StreamTester {
  client: AxiosInstance
  options: OptionValues

  constructor({ options }: StreamTesterProps) {
    const proxyParser = new ProxyParser()
    let request: AxiosRequestConfig = {
      responseType: 'arraybuffer'
    }

    if (options.proxy !== undefined) {
      const proxy = proxyParser.parse(options.proxy) as AxiosProxyConfig

      if (
        proxy.protocol &&
        ['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
      ) {
        const socksProxyAgent = new SocksProxyAgent(options.proxy)

        request = { ...request, ...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent } }
      } else {
        request = { ...request, ...{ proxy } }
      }
    }

    this.client = axios.create(request)
    this.options = options
  }

  async test(stream: Stream): Promise<TestResult> {
    if (TESTING) {
      const results = (await import('../../tests/__data__/input/playlist_test/results.js')).default

      return results[stream.url as keyof typeof results]
    } else {
      try {
        const res = await this.client(stream.url, {
          signal: AbortSignal.timeout(this.options.timeout),
          headers: {
            'User-Agent': stream.getUserAgent() || 'Mozilla/5.0',
            Referer: stream.getReferrer()
          }
        })

        const mediainfo = await mediaInfoFactory({ format: 'object' })
        const buffer = await res.data
        const result = await mediainfo.analyzeData(
          () => buffer.byteLength,
          (size: any, offset: number | undefined) =>
            Buffer.from(buffer).subarray(offset, offset + size)
        )

        if (result && result.media && result.media.track.length > 0) {
          return {
            status: {
              ok: true,
              code: 'OK'
            }
          }
        } else {
          return {
            status: {
              ok: false,
              code: 'NO_VIDEO'
            }
          }
        }
      } catch (error: any) {
        let code = 'UNKNOWN_ERROR'
        if (error.name === 'CanceledError') {
          code = 'TIMEOUT'
        } else if (error.name === 'AxiosError') {
          if (error.response) {
            const status = error.response?.status
            const statusText = error.response?.statusText.toUpperCase().replace(/\s+/, '_')
            code = `HTTP_${status}_${statusText}`
          } else {
            code = `AXIOS_${error.code}`
          }
        } else if (error.cause) {
          const cause = error.cause as Error & { code?: string }
          if (cause.code) {
            code = cause.code
          } else {
            code = cause.name
          }
        }

        return {
          status: {
            ok: false,
            code
          }
        }
      }
    }
  }
}
import axios, { AxiosInstance, AxiosProxyConfig, AxiosRequestConfig, AxiosResponse } from 'axios'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { ProxyParser } from './proxyParser.js'
import mediaInfoFactory from 'mediainfo.js'
import { OptionValues } from 'commander'
import { TESTING } from '../constants'
import { Stream } from '../models'

export type StreamTesterResult = {
  status: {
    ok: boolean
    code: string
  }
}

export type StreamTesterError = {
  name: string
  code?: string
  cause?: Error & { code?: string }
  response?: AxiosResponse
}

export type StreamTesterProps = {
  options: OptionValues
}

export class StreamTester {
  client: AxiosInstance
  options: OptionValues

  constructor({ options }: StreamTesterProps) {
    const proxyParser = new ProxyParser()
    let request: AxiosRequestConfig = {
      responseType: 'arraybuffer'
    }

    if (options.proxy !== undefined) {
      const proxy = proxyParser.parse(options.proxy) as AxiosProxyConfig

      if (
        proxy.protocol &&
        ['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
      ) {
        const socksProxyAgent = new SocksProxyAgent(options.proxy)

        request = { ...request, ...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent } }
      } else {
        request = { ...request, ...{ proxy } }
      }
    }

    this.client = axios.create(request)
    this.options = options
  }

  async test(stream: Stream): Promise<StreamTesterResult> {
    if (TESTING) {
      const results = (await import('../../tests/__data__/input/playlist_test/results.js')).default

      return results[stream.url as keyof typeof results]
    } else {
      try {
        const res = await this.client(stream.url, {
          signal: AbortSignal.timeout(this.options.timeout),
          headers: {
            'User-Agent': stream.user_agent || 'Mozilla/5.0',
            Referer: stream.referrer
          }
        })

        const mediainfo = await mediaInfoFactory({ format: 'object' })
        const buffer = await res.data
        const result = await mediainfo.analyzeData(
          () => buffer.byteLength,
          (size: number, offset: number) => Buffer.from(buffer).subarray(offset, offset + size)
        )

        if (result && result.media && result.media.track.length > 0) {
          return {
            status: {
              ok: true,
              code: 'OK'
            }
          }
        } else {
          return {
            status: {
              ok: false,
              code: 'NO_VIDEO'
            }
          }
        }
      } catch (err: unknown) {
        const error = err as StreamTesterError

        let code = 'UNKNOWN_ERROR'
        if (error.name === 'CanceledError') {
          code = 'TIMEOUT'
        } else if (error.name === 'AxiosError') {
          if (error.response) {
            const status = error.response?.status
            const statusText = error.response?.statusText.toUpperCase().replace(/\s+/, '_')
            code = `HTTP_${status}_${statusText}`
          } else {
            code = `AXIOS_${error.code}`
          }
        } else if (error.cause) {
          const cause = error.cause
          if (cause.code) {
            code = cause.code
          } else {
            code = cause.name
          }
        }

        return {
          status: {
            ok: false,
            code
          }
        }
      }
    }
  }
}
@@ -1,54 +1,60 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Stream, Category, Playlist } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type CategoriesGeneratorProps = {
  streams: Collection
  categories: Collection
  logFile: File
}

export class CategoriesGenerator implements Generator {
  streams: Collection
  categories: Collection
  storage: Storage
  logFile: File

  constructor({ streams, categories, logFile }: CategoriesGeneratorProps) {
    this.streams = streams.clone()
    this.categories = categories
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate() {
    const streams = this.streams.orderBy([(stream: Stream) => stream.getTitle()])

    this.categories.forEach(async (category: Category) => {
      const categoryStreams = streams
        .filter((stream: Stream) => stream.hasCategory(category))
        .map((stream: Stream) => {
          const groupTitle = stream.getCategoryNames().join(';')
          if (groupTitle) stream.groupTitle = groupTitle

          return stream
        })

      const playlist = new Playlist(categoryStreams, { public: true })
      const filepath = `categories/${category.id}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'category', filepath, count: playlist.streams.count() }) + EOL
      )
    })

    const undefinedStreams = streams.filter((stream: Stream) => !stream.hasCategories())
    const playlist = new Playlist(undefinedStreams, { public: true })
    const filepath = 'categories/undefined.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'category', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Collection } from '@freearhey/core'
import { Stream, Playlist } from '../models'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type CategoriesGeneratorProps = {
  streams: Collection<Stream>
  categories: Collection<sdk.Models.Category>
  logFile: File
}

export class CategoriesGenerator implements Generator {
  streams: Collection<Stream>
  categories: Collection<sdk.Models.Category>
  storage: Storage
  logFile: File

  constructor({ streams, categories, logFile }: CategoriesGeneratorProps) {
    this.streams = streams.clone()
    this.categories = categories
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate() {
    const streams = this.streams.sortBy([(stream: Stream) => stream.title])

    this.categories.forEach(async (category: sdk.Models.Category) => {
      const categoryStreams = streams
        .filter((stream: Stream) => stream.hasCategory(category))
        .map((stream: Stream) => {
          const groupTitle = stream
            .getCategories()
            .map(category => category.name)
            .sort()
            .join(';')
          if (groupTitle) stream.groupTitle = groupTitle

          return stream
        })

      const playlist = new Playlist(categoryStreams, { public: true })
      const filepath = `categories/${category.id}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'category', filepath, count: playlist.streams.count() }) + EOL
      )
    })

    const undefinedStreams = streams.filter((stream: Stream) => stream.getCategories().isEmpty())
    const playlist = new Playlist(undefinedStreams, { public: true })
    const filepath = 'categories/undefined.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'category', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,43 +1,54 @@
import { City, Stream, Playlist } from '../models'
import { Collection, Storage, File } from '@freearhey/core'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type CitiesGeneratorProps = {
  streams: Collection
  cities: Collection
  logFile: File
}

export class CitiesGenerator implements Generator {
  streams: Collection
  cities: Collection
  storage: Storage
  logFile: File

  constructor({ streams, cities, logFile }: CitiesGeneratorProps) {
    this.streams = streams.clone()
    this.cities = cities
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())

    this.cities.forEach(async (city: City) => {
      const cityStreams = streams.filter((stream: Stream) => stream.isBroadcastInCity(city))

      if (cityStreams.isEmpty()) return

      const playlist = new Playlist(cityStreams, { public: true })
      const filepath = `cities/${city.code.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'city', filepath, count: playlist.streams.count() }) + EOL
      )
    })
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type CitiesGeneratorProps = {
  streams: Collection<Stream>
  cities: Collection<sdk.Models.City>
  logFile: File
}

export class CitiesGenerator implements Generator {
  streams: Collection<Stream>
  cities: Collection<sdk.Models.City>
  storage: Storage
  logFile: File

  constructor({ streams, cities, logFile }: CitiesGeneratorProps) {
    this.streams = streams.clone()
    this.cities = cities
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .sortBy((stream: Stream) => stream.title)
      .filter((stream: Stream) => stream.isSFW())

    const streamsGroupedByCityCode = {}
    streams.forEach((stream: Stream) => {
      stream.getBroadcastCities().forEach((city: sdk.Models.City) => {
        if (streamsGroupedByCityCode[city.code]) {
          streamsGroupedByCityCode[city.code].add(stream)
        } else {
          streamsGroupedByCityCode[city.code] = new Collection<Stream>([stream])
        }
      })
    })

    for (const cityCode in streamsGroupedByCityCode) {
      const cityStreams = streamsGroupedByCityCode[cityCode]

      const playlist = new Playlist(cityStreams, { public: true })
      const filepath = `cities/${cityCode.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'city', filepath, count: playlist.streams.count() }) + EOL
      )
    }
  }
}
@@ -1,68 +1,80 @@
import { Country, Stream, Playlist } from '../models'
import { Collection, Storage, File } from '@freearhey/core'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type CountriesGeneratorProps = {
  streams: Collection
  countries: Collection
  logFile: File
}

export class CountriesGenerator implements Generator {
  streams: Collection
  countries: Collection
  storage: Storage
  logFile: File

  constructor({ streams, countries, logFile }: CountriesGeneratorProps) {
    this.streams = streams.clone()
    this.countries = countries
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())

    this.countries.forEach(async (country: Country) => {
      const countryStreams = streams.filter((stream: Stream) =>
        stream.isBroadcastInCountry(country)
      )
      if (countryStreams.isEmpty()) return

      const playlist = new Playlist(countryStreams, { public: true })
      const filepath = `countries/${country.code.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'country', filepath, count: playlist.streams.count() }) + EOL
      )
    })

    const internationalStreams = streams.filter((stream: Stream) => stream.isInternational())
    const internationalPlaylist = new Playlist(internationalStreams, { public: true })
    const internationalFilepath = 'countries/int.m3u'
    await this.storage.save(internationalFilepath, internationalPlaylist.toString())
    this.logFile.append(
      JSON.stringify({
        type: 'country',
        filepath: internationalFilepath,
        count: internationalPlaylist.streams.count()
      }) + EOL
    )

    const undefinedStreams = streams.filter((stream: Stream) => !stream.hasBroadcastArea())
    const undefinedPlaylist = new Playlist(undefinedStreams, { public: true })
    const undefinedFilepath = 'countries/undefined.m3u'
    await this.storage.save(undefinedFilepath, undefinedPlaylist.toString())
    this.logFile.append(
      JSON.stringify({
        type: 'country',
        filepath: undefinedFilepath,
        count: undefinedPlaylist.streams.count()
      }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type CountriesGeneratorProps = {
  streams: Collection<Stream>
  countries: Collection<sdk.Models.Country>
  logFile: File
}

export class CountriesGenerator implements Generator {
  streams: Collection<Stream>
  countries: Collection<sdk.Models.Country>
  storage: Storage
  logFile: File

  constructor({ streams, countries, logFile }: CountriesGeneratorProps) {
    this.streams = streams.clone()
    this.countries = countries
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .sortBy((stream: Stream) => stream.title)
      .filter((stream: Stream) => stream.isSFW())

    const streamsGroupedByCountryCode = {}
    streams.forEach((stream: Stream) => {
      stream.getBroadcastCountries().forEach((country: sdk.Models.Country) => {
        if (streamsGroupedByCountryCode[country.code]) {
          streamsGroupedByCountryCode[country.code].add(stream)
        } else {
          streamsGroupedByCountryCode[country.code] = new Collection<Stream>([stream])
        }
      })
    })

    for (const countryCode in streamsGroupedByCountryCode) {
      const countryStreams = streamsGroupedByCountryCode[countryCode]

      const playlist = new Playlist(countryStreams, { public: true })
      const filepath = `countries/${countryCode.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'country', filepath, count: playlist.streams.count() }) + EOL
      )
    }

    const internationalStreams = streams.filter((stream: Stream) => stream.isInternational())
    const internationalPlaylist = new Playlist(internationalStreams, { public: true })
    const internationalFilepath = 'countries/int.m3u'
    await this.storage.save(internationalFilepath, internationalPlaylist.toString())
    this.logFile.append(
      JSON.stringify({
        type: 'country',
        filepath: internationalFilepath,
        count: internationalPlaylist.streams.count()
      }) + EOL
    )

    const undefinedStreams = streams.filter((stream: Stream) =>
      stream.getBroadcastAreaCodes().isEmpty()
    )
    const undefinedPlaylist = new Playlist(undefinedStreams, { public: true })
    const undefinedFilepath = 'countries/undefined.m3u'
    await this.storage.save(undefinedFilepath, undefinedPlaylist.toString())
    this.logFile.append(
      JSON.stringify({
        type: 'country',
        filepath: undefinedFilepath,
        count: undefinedPlaylist.streams.count()
      }) + EOL
    )
  }
}
@@ -1,3 +1,3 @@
export interface Generator {
  generate(): Promise<void>
}
@@ -1,13 +1,12 @@
export * from './categoriesGenerator'
export * from './citiesGenerator'
export * from './countriesGenerator'
export * from './indexCategoryGenerator'
export * from './indexCountryGenerator'
export * from './indexGenerator'
export * from './indexLanguageGenerator'
export * from './indexNsfwGenerator'
export * from './languagesGenerator'
export * from './rawGenerator'
export * from './regionsGenerator'
export * from './sourcesGenerator'
export * from './subdivisionsGenerator'

export * from './categoriesGenerator'
export * from './citiesGenerator'
export * from './countriesGenerator'
export * from './indexCategoryGenerator'
export * from './indexCountryGenerator'
export * from './indexGenerator'
export * from './indexLanguageGenerator'
export * from './languagesGenerator'
export * from './rawGenerator'
export * from './regionsGenerator'
export * from './sourcesGenerator'
export * from './subdivisionsGenerator'
@@ -1,55 +1,56 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Stream, Playlist, Category } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type IndexCategoryGeneratorProps = {
  streams: Collection
  logFile: File
}

export class IndexCategoryGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexCategoryGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy(stream => stream.getTitle())
      .filter(stream => stream.isSFW())

    let groupedStreams = new Collection()
    streams.forEach((stream: Stream) => {
      if (!stream.hasCategories()) {
        const streamClone = stream.clone()
        streamClone.groupTitle = 'Undefined'
        groupedStreams.add(streamClone)
        return
      }

      stream.getCategories().forEach((category: Category) => {
        const streamClone = stream.clone()
        streamClone.groupTitle = category.name
        groupedStreams.push(streamClone)
      })
    })

    groupedStreams = groupedStreams.orderBy(stream => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.category.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type IndexCategoryGeneratorProps = {
  streams: Collection<Stream>
  logFile: File
}

export class IndexCategoryGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexCategoryGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams.sortBy(stream => stream.title).filter(stream => stream.isSFW())

    let groupedStreams = new Collection<Stream>()
    streams.forEach((stream: Stream) => {
      const streamCategories = stream.getCategories()
      if (streamCategories.isEmpty()) {
        const streamClone = stream.clone()
        streamClone.groupTitle = 'Undefined'
        groupedStreams.add(streamClone)
        return
      }

      streamCategories.forEach((category: sdk.Models.Category) => {
        const streamClone = stream.clone()
        streamClone.groupTitle = category.name
        groupedStreams.add(streamClone)
      })
    })

    groupedStreams = groupedStreams.sortBy(stream => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.category.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,63 +1,67 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Stream, Playlist, Country } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type IndexCountryGeneratorProps = {
  streams: Collection
  logFile: File
}

export class IndexCountryGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexCountryGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection()

    this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())
      .forEach((stream: Stream) => {
        if (!stream.hasBroadcastArea()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        stream.getBroadcastCountries().forEach((country: Country) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = country.name
          groupedStreams.add(streamClone)
        })

        if (stream.isInternational()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'International'
          groupedStreams.add(streamClone)
        }
      })

    groupedStreams = groupedStreams.orderBy((stream: Stream) => {
      if (stream.groupTitle === 'International') return 'ZZ'
      if (stream.groupTitle === 'Undefined') return 'ZZZ'

      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.country.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type IndexCountryGeneratorProps = {
  streams: Collection<Stream>
  logFile: File
}

export class IndexCountryGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexCountryGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection<Stream>()

    this.streams
      .sortBy((stream: Stream) => stream.title)
      .filter((stream: Stream) => stream.isSFW())
      .forEach((stream: Stream) => {
        const broadcastAreaCountries = stream.getBroadcastCountries()

        if (stream.getBroadcastAreaCodes().isEmpty()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        broadcastAreaCountries.forEach((country: sdk.Models.Country) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = country.name
          groupedStreams.add(streamClone)
        })

        if (stream.isInternational()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'International'
          groupedStreams.add(streamClone)
        }
      })

    groupedStreams = groupedStreams.sortBy((stream: Stream) => {
      if (stream.groupTitle === 'International') return 'ZZ'
      if (stream.groupTitle === 'Undefined') return 'ZZZ'

      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.country.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,40 +1,45 @@
import { Collection, File, Storage } from '@freearhey/core'
import { Stream, Playlist } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type IndexGeneratorProps = {
  streams: Collection
  logFile: File
}

export class IndexGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const sfwStreams = this.streams
      .orderBy(stream => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())
      .map((stream: Stream) => {
        const groupTitle = stream.getCategoryNames().join(';')
        if (groupTitle) stream.groupTitle = groupTitle

        return stream
      })

    const playlist = new Playlist(sfwStreams, { public: true })
    const filepath = 'index.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'

type IndexGeneratorProps = {
  streams: Collection<Stream>
  logFile: File
}

export class IndexGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const sfwStreams = this.streams
      .sortBy(stream => stream.title)
      .filter((stream: Stream) => stream.isSFW())
      .map((stream: Stream) => {
        const groupTitle = stream
          .getCategories()
          .map(category => category.name)
          .sort()
          .join(';')
        if (groupTitle) stream.groupTitle = groupTitle

        return stream
      })

    const playlist = new Playlist(sfwStreams, { public: true })
    const filepath = 'index.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,54 +1,57 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Stream, Playlist, Language } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type IndexLanguageGeneratorProps = {
  streams: Collection
  logFile: File
}

export class IndexLanguageGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexLanguageGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection()
    this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())
      .forEach((stream: Stream) => {
        if (!stream.hasLanguages()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        stream.getLanguages().forEach((language: Language) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = language.name
          groupedStreams.add(streamClone)
        })
      })

    groupedStreams = groupedStreams.orderBy((stream: Stream) => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.language.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type IndexLanguageGeneratorProps = {
  streams: Collection<Stream>
  logFile: File
}

export class IndexLanguageGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexLanguageGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    let groupedStreams = new Collection<Stream>()
    this.streams
      .sortBy((stream: Stream) => stream.title)
      .filter((stream: Stream) => stream.isSFW())
      .forEach((stream: Stream) => {
        const streamLanguages = stream.getLanguages()
        if (streamLanguages.isEmpty()) {
          const streamClone = stream.clone()
          streamClone.groupTitle = 'Undefined'
          groupedStreams.add(streamClone)
          return
        }

        streamLanguages.forEach((language: sdk.Models.Language) => {
          const streamClone = stream.clone()
          streamClone.groupTitle = language.name
          groupedStreams.add(streamClone)
        })
      })

    groupedStreams = groupedStreams.sortBy((stream: Stream) => {
      if (stream.groupTitle === 'Undefined') return 'ZZ'
      return stream.groupTitle
    })

    const playlist = new Playlist(groupedStreams, { public: true })
    const filepath = 'index.language.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,32 +0,0 @@
import { Collection, File, Storage } from '@freearhey/core'
import { Stream, Playlist } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type IndexNsfwGeneratorProps = {
  streams: Collection
  logFile: File
}

export class IndexNsfwGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: IndexNsfwGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const allStreams = this.streams.orderBy((stream: Stream) => stream.getTitle())

    const playlist = new Playlist(allStreams, { public: true })
    const filepath = 'index.nsfw.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'index', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,57 +1,58 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Playlist, Language, Stream } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type LanguagesGeneratorProps = { streams: Collection; logFile: File }

export class LanguagesGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: LanguagesGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())

    let languages = new Collection()
    streams.forEach((stream: Stream) => {
      languages = languages.concat(stream.getLanguages())
    })

    languages
      .filter(Boolean)
      .uniqBy((language: Language) => language.code)
      .orderBy((language: Language) => language.name)
      .forEach(async (language: Language) => {
        const languageStreams = streams.filter((stream: Stream) => stream.hasLanguage(language))

        if (languageStreams.isEmpty()) return

        const playlist = new Playlist(languageStreams, { public: true })
        const filepath = `languages/${language.code}.m3u`
        await this.storage.save(filepath, playlist.toString())
        this.logFile.append(
          JSON.stringify({ type: 'language', filepath, count: playlist.streams.count() }) + EOL
        )
      })

    const undefinedStreams = streams.filter((stream: Stream) => !stream.hasLanguages())

    if (undefinedStreams.isEmpty()) return

    const playlist = new Playlist(undefinedStreams, { public: true })
    const filepath = 'languages/undefined.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'language', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Playlist, Stream } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'
import * as sdk from '@iptv-org/sdk'

type LanguagesGeneratorProps = { streams: Collection<Stream>; logFile: File }

export class LanguagesGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: LanguagesGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams: Collection<Stream> = this.streams
      .sortBy((stream: Stream) => stream.title)
      .filter((stream: Stream) => stream.isSFW())

    const languages = new Collection<sdk.Models.Language>()
    streams.forEach((stream: Stream) => {
      languages.concat(stream.getLanguages())
    })

    languages
      .filter(Boolean)
      .uniqBy((language: sdk.Models.Language) => language.code)
      .sortBy((language: sdk.Models.Language) => language.name)
      .forEach(async (language: sdk.Models.Language) => {
        const languageStreams = streams.filter((stream: Stream) => stream.hasLanguage(language))

        if (languageStreams.isEmpty()) return

        const playlist = new Playlist(languageStreams, { public: true })
        const filepath = `languages/${language.code}.m3u`
        await this.storage.save(filepath, playlist.toString())
        this.logFile.append(
          JSON.stringify({ type: 'language', filepath, count: playlist.streams.count() }) + EOL
        )
      })

    const undefinedStreams = streams.filter((stream: Stream) => stream.getLanguages().isEmpty())
    if (undefinedStreams.isEmpty()) return

    const playlist = new Playlist(undefinedStreams, { public: true })
    const filepath = 'languages/undefined.m3u'
    await this.storage.save(filepath, playlist.toString())
    this.logFile.append(
      JSON.stringify({ type: 'language', filepath, count: playlist.streams.count() }) + EOL
    )
  }
}
@@ -1,40 +1,45 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Stream, Playlist } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type RawGeneratorProps = {
  streams: Collection
  logFile: File
}

export class RawGenerator implements Generator {
  streams: Collection
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: RawGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate() {
    const files = this.streams.groupBy((stream: Stream) => stream.getFilename())

    for (const filename of files.keys()) {
      const streams = new Collection(files.get(filename)).map((stream: Stream) => {
        const groupTitle = stream.getCategoryNames().join(';')
        if (groupTitle) stream.groupTitle = groupTitle

        return stream
      })
      const playlist = new Playlist(streams, { public: true })
      const filepath = `raw/${filename}`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'raw', filepath, count: playlist.streams.count() }) + EOL
      )
    }
  }
}
import { Storage, File } from '@freearhey/storage-js'
import { PUBLIC_DIR, EOL } from '../constants'
import { Stream, Playlist } from '../models'
import { Collection } from '@freearhey/core'
import { Generator } from './generator'

type RawGeneratorProps = {
  streams: Collection<Stream>
  logFile: File
}

export class RawGenerator implements Generator {
  streams: Collection<Stream>
  storage: Storage
  logFile: File

  constructor({ streams, logFile }: RawGeneratorProps) {
    this.streams = streams.clone()
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate() {
    const files = this.streams.groupBy((stream: Stream) => stream.getFilename())

    for (const filename of files.keys()) {
      const streams = new Collection(files.get(filename)).map((stream: Stream) => {
        const groupTitle = stream
          .getCategories()
          .map(category => category.name)
          .sort()
          .join(';')
        if (groupTitle) stream.groupTitle = groupTitle

        return stream
      })
      const playlist = new Playlist(streams, { public: true })
      const filepath = `raw/${filename}`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'raw', filepath, count: playlist.streams.count() }) + EOL
      )
    }
  }
}
@@ -1,41 +1,54 @@
import { Collection, Storage, File } from '@freearhey/core'
import { Playlist, Region, Stream } from '../models'
import { PUBLIC_DIR, EOL } from '../constants'
import { Generator } from './generator'

type RegionsGeneratorProps = {
  streams: Collection
  regions: Collection
  logFile: File
}

export class RegionsGenerator implements Generator {
  streams: Collection
  regions: Collection
  storage: Storage
  logFile: File

  constructor({ streams, regions, logFile }: RegionsGeneratorProps) {
    this.streams = streams.clone()
    this.regions = regions
    this.storage = new Storage(PUBLIC_DIR)
    this.logFile = logFile
  }

  async generate(): Promise<void> {
    const streams = this.streams
      .orderBy((stream: Stream) => stream.getTitle())
      .filter((stream: Stream) => stream.isSFW())

    this.regions.forEach(async (region: Region) => {
      const regionStreams = streams.filter((stream: Stream) => stream.isBroadcastInRegion(region))

      const playlist = new Playlist(regionStreams, { public: true })
      const filepath = `regions/${region.code.toLowerCase()}.m3u`
      await this.storage.save(filepath, playlist.toString())
      this.logFile.append(
        JSON.stringify({ type: 'region', filepath, count: playlist.streams.count() }) + EOL
      )
    })
  }
}
import { Storage, File } from '@freearhey/storage-js'
|
||||
import { PUBLIC_DIR, EOL } from '../constants'
|
||||
import { Playlist, Stream } from '../models'
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Generator } from './generator'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
|
||||
type RegionsGeneratorProps = {
|
||||
streams: Collection<Stream>
|
||||
regions: Collection<sdk.Models.Region>
|
||||
logFile: File
|
||||
}
|
||||
|
||||
export class RegionsGenerator implements Generator {
|
||||
streams: Collection<Stream>
|
||||
regions: Collection<sdk.Models.Region>
|
||||
storage: Storage
|
||||
logFile: File
|
||||
|
||||
constructor({ streams, regions, logFile }: RegionsGeneratorProps) {
|
||||
this.streams = streams.clone()
|
||||
this.regions = regions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logFile = logFile
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const streams = this.streams
|
||||
.sortBy((stream: Stream) => stream.title)
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
|
||||
const streamsGroupedByRegionCode = {}
|
||||
streams.forEach((stream: Stream) => {
|
||||
stream.getBroadcastRegions().forEach((region: sdk.Models.Region) => {
|
||||
if (streamsGroupedByRegionCode[region.code]) {
|
||||
streamsGroupedByRegionCode[region.code].add(stream)
|
||||
} else {
|
||||
streamsGroupedByRegionCode[region.code] = new Collection<Stream>([stream])
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
for (const regionCode in streamsGroupedByRegionCode) {
|
||||
const regionStreams = streamsGroupedByRegionCode[regionCode]
|
||||
|
||||
const playlist = new Playlist(regionStreams, { public: true })
|
||||
const filepath = `regions/${regionCode.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logFile.append(
|
||||
JSON.stringify({ type: 'region', filepath, count: playlist.streams.count() }) + EOL
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
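The rewritten generator inverts the old loop: instead of filtering the full stream list once per region, it walks the streams once and buckets each one under every region code it broadcasts in. A standalone sketch of that bucketing pattern with plain arrays (the item shape and the codes are illustrative, not the project's classes):

type Item = { title: string }

// One pass over the items instead of one filter pass per region.
function groupByCode(items: Item[], getCodes: (item: Item) => string[]): Record<string, Item[]> {
  const buckets: Record<string, Item[]> = {}
  for (const item of items) {
    for (const code of getCodes(item)) {
      if (!buckets[code]) buckets[code] = []
      buckets[code].push(item)
    }
  }
  return buckets
}

const buckets = groupByCode([{ title: 'Example TV' }], () => ['EUR', 'WW'])
// buckets => { EUR: [{ title: 'Example TV' }], WW: [{ title: 'Example TV' }] }
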
@@ -1,43 +1,49 @@
|
||||
import { Collection, Storage, File, type Dictionary } from '@freearhey/core'
|
||||
import { Stream, Playlist } from '../models'
|
||||
import { PUBLIC_DIR, EOL } from '../constants'
|
||||
import { Generator } from './generator'
|
||||
|
||||
type SourcesGeneratorProps = {
|
||||
streams: Collection
|
||||
logFile: File
|
||||
}
|
||||
|
||||
export class SourcesGenerator implements Generator {
|
||||
streams: Collection
|
||||
storage: Storage
|
||||
logFile: File
|
||||
|
||||
constructor({ streams, logFile }: SourcesGeneratorProps) {
|
||||
this.streams = streams.clone()
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logFile = logFile
|
||||
}
|
||||
|
||||
async generate() {
|
||||
const files: Dictionary = this.streams.groupBy((stream: Stream) => stream.getFilename())
|
||||
|
||||
for (const filename of files.keys()) {
|
||||
if (!filename) continue
|
||||
|
||||
let streams = new Collection(files.get(filename))
|
||||
streams = streams.map((stream: Stream) => {
|
||||
const groupTitle = stream.getCategoryNames().join(';')
|
||||
if (groupTitle) stream.groupTitle = groupTitle
|
||||
|
||||
return stream
|
||||
})
|
||||
const playlist = new Playlist(streams, { public: true })
|
||||
const filepath = `sources/${filename}`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logFile.append(
|
||||
JSON.stringify({ type: 'source', filepath, count: playlist.streams.count() }) + EOL
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Storage, File } from '@freearhey/storage-js'
|
||||
import { PUBLIC_DIR, EOL } from '../constants'
|
||||
import { Stream, Playlist } from '../models'
|
||||
import { Generator } from './generator'
|
||||
|
||||
type SourcesGeneratorProps = {
|
||||
streams: Collection<Stream>
|
||||
logFile: File
|
||||
}
|
||||
|
||||
export class SourcesGenerator implements Generator {
|
||||
streams: Collection<Stream>
|
||||
storage: Storage
|
||||
logFile: File
|
||||
|
||||
constructor({ streams, logFile }: SourcesGeneratorProps) {
|
||||
this.streams = streams.clone()
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logFile = logFile
|
||||
}
|
||||
|
||||
async generate() {
|
||||
const files: Dictionary<Stream[]> = this.streams.groupBy((stream: Stream) =>
|
||||
stream.getFilename()
|
||||
)
|
||||
|
||||
for (const filename of files.keys()) {
|
||||
if (!filename) continue
|
||||
|
||||
const streams = new Collection<Stream>(files.get(filename)).map((stream: Stream) => {
|
||||
const groupTitle = stream
|
||||
.getCategories()
|
||||
.map(category => category.name)
|
||||
.sort()
|
||||
.join(';')
|
||||
if (groupTitle) stream.groupTitle = groupTitle
|
||||
|
||||
return stream
|
||||
})
|
||||
const playlist = new Playlist(streams, { public: true })
|
||||
const filepath = `sources/${filename}`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logFile.append(
|
||||
JSON.stringify({ type: 'source', filepath, count: playlist.streams.count() }) + EOL
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,45 +1,54 @@
|
||||
import { Subdivision, Stream, Playlist } from '../models'
|
||||
import { Collection, Storage, File } from '@freearhey/core'
|
||||
import { PUBLIC_DIR, EOL } from '../constants'
|
||||
import { Generator } from './generator'
|
||||
|
||||
type SubdivisionsGeneratorProps = {
|
||||
streams: Collection
|
||||
subdivisions: Collection
|
||||
logFile: File
|
||||
}
|
||||
|
||||
export class SubdivisionsGenerator implements Generator {
|
||||
streams: Collection
|
||||
subdivisions: Collection
|
||||
storage: Storage
|
||||
logFile: File
|
||||
|
||||
constructor({ streams, subdivisions, logFile }: SubdivisionsGeneratorProps) {
|
||||
this.streams = streams.clone()
|
||||
this.subdivisions = subdivisions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logFile = logFile
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const streams = this.streams
|
||||
.orderBy((stream: Stream) => stream.getTitle())
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
|
||||
this.subdivisions.forEach(async (subdivision: Subdivision) => {
|
||||
const subdivisionStreams = streams.filter((stream: Stream) =>
|
||||
stream.isBroadcastInSubdivision(subdivision)
|
||||
)
|
||||
|
||||
if (subdivisionStreams.isEmpty()) return
|
||||
|
||||
const playlist = new Playlist(subdivisionStreams, { public: true })
|
||||
const filepath = `subdivisions/${subdivision.code.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logFile.append(
|
||||
JSON.stringify({ type: 'subdivision', filepath, count: playlist.streams.count() }) + EOL
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
import { Storage, File } from '@freearhey/storage-js'
|
||||
import { PUBLIC_DIR, EOL } from '../constants'
|
||||
import { Stream, Playlist } from '../models'
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Generator } from './generator'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
|
||||
type SubdivisionsGeneratorProps = {
|
||||
streams: Collection<Stream>
|
||||
subdivisions: Collection<sdk.Models.Subdivision>
|
||||
logFile: File
|
||||
}
|
||||
|
||||
export class SubdivisionsGenerator implements Generator {
|
||||
streams: Collection<Stream>
|
||||
subdivisions: Collection<sdk.Models.Subdivision>
|
||||
storage: Storage
|
||||
logFile: File
|
||||
|
||||
constructor({ streams, subdivisions, logFile }: SubdivisionsGeneratorProps) {
|
||||
this.streams = streams.clone()
|
||||
this.subdivisions = subdivisions
|
||||
this.storage = new Storage(PUBLIC_DIR)
|
||||
this.logFile = logFile
|
||||
}
|
||||
|
||||
async generate(): Promise<void> {
|
||||
const streams = this.streams
|
||||
.sortBy((stream: Stream) => stream.title)
|
||||
.filter((stream: Stream) => stream.isSFW())
|
||||
|
||||
const streamsGroupedBySubdivisionCode = {}
|
||||
streams.forEach((stream: Stream) => {
|
||||
stream.getBroadcastSubdivisions().forEach((subdivision: sdk.Models.Subdivision) => {
|
||||
if (streamsGroupedBySubdivisionCode[subdivision.code]) {
|
||||
streamsGroupedBySubdivisionCode[subdivision.code].add(stream)
|
||||
} else {
|
||||
streamsGroupedBySubdivisionCode[subdivision.code] = new Collection<Stream>([stream])
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
for (const subdivisionCode in streamsGroupedBySubdivisionCode) {
|
||||
const subdivisionStreams = streamsGroupedBySubdivisionCode[subdivisionCode]
|
||||
|
||||
const playlist = new Playlist(subdivisionStreams, { public: true })
|
||||
const filepath = `subdivisions/${subdivisionCode.toLowerCase()}.m3u`
|
||||
await this.storage.save(filepath, playlist.toString())
|
||||
this.logFile.append(
|
||||
JSON.stringify({ type: 'subdivision', filepath, count: playlist.streams.count() }) + EOL
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +0,0 @@
import type { BlocklistRecordData } from '../types/blocklistRecord'

export class BlocklistRecord {
  channelId: string
  reason: string
  ref: string

  constructor(data?: BlocklistRecordData) {
    if (!data) return

    this.channelId = data.channel
    this.reason = data.reason
    this.ref = data.ref
  }
}

@@ -1,108 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { City, Subdivision, Region, Country } from './'
|
||||
|
||||
export class BroadcastArea {
|
||||
codes: Collection
|
||||
citiesIncluded: Collection
|
||||
subdivisionsIncluded: Collection
|
||||
countriesIncluded: Collection
|
||||
regionsIncluded: Collection
|
||||
|
||||
constructor(codes: Collection) {
|
||||
this.codes = codes
|
||||
}
|
||||
|
||||
withLocations(
|
||||
citiesKeyByCode: Dictionary,
|
||||
subdivisionsKeyByCode: Dictionary,
|
||||
countriesKeyByCode: Dictionary,
|
||||
regionsKeyByCode: Dictionary
|
||||
): this {
|
||||
const citiesIncluded = new Collection()
|
||||
const subdivisionsIncluded = new Collection()
|
||||
const countriesIncluded = new Collection()
|
||||
let regionsIncluded = new Collection()
|
||||
|
||||
this.codes.forEach((value: string) => {
|
||||
const [type, code] = value.split('/')
|
||||
|
||||
switch (type) {
|
||||
case 'ct': {
|
||||
const city: City = citiesKeyByCode.get(code)
|
||||
if (!city) return
|
||||
citiesIncluded.add(city)
|
||||
if (city.subdivision) subdivisionsIncluded.add(city.subdivision)
|
||||
if (city.subdivision && city.subdivision.parent)
|
||||
subdivisionsIncluded.add(city.subdivision.parent)
|
||||
if (city.country) countriesIncluded.add(city.country)
|
||||
regionsIncluded = regionsIncluded.concat(city.getRegions())
|
||||
break
|
||||
}
|
||||
case 's': {
|
||||
const subdivision: Subdivision = subdivisionsKeyByCode.get(code)
|
||||
if (!subdivision) return
|
||||
subdivisionsIncluded.add(subdivision)
|
||||
if (subdivision.country) countriesIncluded.add(subdivision.country)
|
||||
regionsIncluded = regionsIncluded.concat(subdivision.getRegions())
|
||||
break
|
||||
}
|
||||
case 'c': {
|
||||
const country: Country = countriesKeyByCode.get(code)
|
||||
if (!country) return
|
||||
countriesIncluded.add(country)
|
||||
regionsIncluded = regionsIncluded.concat(country.getRegions())
|
||||
break
|
||||
}
|
||||
case 'r': {
|
||||
const region: Region = regionsKeyByCode.get(code)
|
||||
if (!region) return
|
||||
regionsIncluded = regionsIncluded.concat(region.getRegions())
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
this.citiesIncluded = citiesIncluded.uniqBy((city: City) => city.code)
|
||||
this.subdivisionsIncluded = subdivisionsIncluded.uniqBy(
|
||||
(subdivision: Subdivision) => subdivision.code
|
||||
)
|
||||
this.countriesIncluded = countriesIncluded.uniqBy((country: Country) => country.code)
|
||||
this.regionsIncluded = regionsIncluded.uniqBy((region: Region) => region.code)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getCountries(): Collection {
|
||||
return this.countriesIncluded || new Collection()
|
||||
}
|
||||
|
||||
getSubdivisions(): Collection {
|
||||
return this.subdivisionsIncluded || new Collection()
|
||||
}
|
||||
|
||||
getCities(): Collection {
|
||||
return this.citiesIncluded || new Collection()
|
||||
}
|
||||
|
||||
getRegions(): Collection {
|
||||
return this.regionsIncluded || new Collection()
|
||||
}
|
||||
|
||||
includesCountry(country: Country): boolean {
|
||||
return this.getCountries().includes((_country: Country) => _country.code === country.code)
|
||||
}
|
||||
|
||||
includesSubdivision(subdivision: Subdivision): boolean {
|
||||
return this.getSubdivisions().includes(
|
||||
(_subdivision: Subdivision) => _subdivision.code === subdivision.code
|
||||
)
|
||||
}
|
||||
|
||||
includesRegion(region: Region): boolean {
|
||||
return this.getRegions().includes((_region: Region) => _region.code === region.code)
|
||||
}
|
||||
|
||||
includesCity(city: City): boolean {
|
||||
return this.getCities().includes((_city: City) => _city.code === city.code)
|
||||
}
|
||||
}
|
||||
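The removed class resolves prefixed area codes of the form type/code, where the prefix selects the lookup table used in the switch above. A minimal standalone parser for the same convention (the sample codes are illustrative):

type AreaType = 'city' | 'subdivision' | 'country' | 'region'

// Prefixes mirror the switch in withLocations(): ct = city, s = subdivision, c = country, r = region.
function parseAreaCode(value: string): { type: AreaType; code: string } | null {
  const typeByPrefix: Record<string, AreaType> = {
    ct: 'city',
    s: 'subdivision',
    c: 'country',
    r: 'region'
  }
  const [prefix, code] = value.split('/')
  const type = typeByPrefix[prefix]
  return type && code ? { type, code } : null
}

parseAreaCode('c/US')  // { type: 'country', code: 'US' }
parseAreaCode('r/EUR') // { type: 'region', code: 'EUR' }
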
@@ -1,18 +0,0 @@
import type { CategoryData, CategorySerializedData } from '../types/category'

export class Category {
  id: string
  name: string

  constructor(data: CategoryData) {
    this.id = data.id
    this.name = data.name
  }

  serialize(): CategorySerializedData {
    return {
      id: this.id,
      name: this.name
    }
  }
}

@@ -1,233 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Category, Country, Feed, Guide, Logo, Stream, Subdivision } from './index'
|
||||
import type { ChannelData, ChannelSearchableData, ChannelSerializedData } from '../types/channel'
|
||||
|
||||
export class Channel {
|
||||
id: string
|
||||
name: string
|
||||
altNames: Collection
|
||||
network?: string
|
||||
owners: Collection
|
||||
countryCode: string
|
||||
country?: Country
|
||||
subdivisionCode?: string
|
||||
subdivision?: Subdivision
|
||||
cityName?: string
|
||||
categoryIds: Collection
|
||||
categories: Collection = new Collection()
|
||||
isNSFW: boolean
|
||||
launched?: string
|
||||
closed?: string
|
||||
replacedBy?: string
|
||||
isClosed: boolean
|
||||
website?: string
|
||||
feeds?: Collection
|
||||
logos: Collection = new Collection()
|
||||
|
||||
constructor(data?: ChannelData) {
|
||||
if (!data) return
|
||||
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.altNames = new Collection(data.alt_names)
|
||||
this.network = data.network || undefined
|
||||
this.owners = new Collection(data.owners)
|
||||
this.countryCode = data.country
|
||||
this.subdivisionCode = data.subdivision || undefined
|
||||
this.cityName = data.city || undefined
|
||||
this.categoryIds = new Collection(data.categories)
|
||||
this.isNSFW = data.is_nsfw
|
||||
this.launched = data.launched || undefined
|
||||
this.closed = data.closed || undefined
|
||||
this.replacedBy = data.replaced_by || undefined
|
||||
this.website = data.website || undefined
|
||||
this.isClosed = !!data.closed || !!data.replaced_by
|
||||
}
|
||||
|
||||
withSubdivision(subdivisionsKeyByCode: Dictionary): this {
|
||||
if (!this.subdivisionCode) return this
|
||||
|
||||
this.subdivision = subdivisionsKeyByCode.get(this.subdivisionCode)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withCountry(countriesKeyByCode: Dictionary): this {
|
||||
this.country = countriesKeyByCode.get(this.countryCode)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withCategories(categoriesKeyById: Dictionary): this {
|
||||
this.categories = this.categoryIds
|
||||
.map((id: string) => categoriesKeyById.get(id))
|
||||
.filter(Boolean)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withFeeds(feedsGroupedByChannelId: Dictionary): this {
|
||||
this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLogos(logosGroupedByChannelId: Dictionary): this {
|
||||
if (this.id) this.logos = new Collection(logosGroupedByChannelId.get(this.id))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getCountry(): Country | undefined {
|
||||
return this.country
|
||||
}
|
||||
|
||||
getSubdivision(): Subdivision | undefined {
|
||||
return this.subdivision
|
||||
}
|
||||
|
||||
getCategories(): Collection {
|
||||
return this.categories || new Collection()
|
||||
}
|
||||
|
||||
hasCategories(): boolean {
|
||||
return !!this.categories && this.categories.notEmpty()
|
||||
}
|
||||
|
||||
hasCategory(category: Category): boolean {
|
||||
return (
|
||||
!!this.categories &&
|
||||
this.categories.includes((_category: Category) => _category.id === category.id)
|
||||
)
|
||||
}
|
||||
|
||||
getFeeds(): Collection {
|
||||
if (!this.feeds) return new Collection()
|
||||
|
||||
return this.feeds
|
||||
}
|
||||
|
||||
getGuides(): Collection {
|
||||
let guides = new Collection()
|
||||
|
||||
this.getFeeds().forEach((feed: Feed) => {
|
||||
guides = guides.concat(feed.getGuides())
|
||||
})
|
||||
|
||||
return guides
|
||||
}
|
||||
|
||||
getGuideNames(): Collection {
|
||||
return this.getGuides()
|
||||
.map((guide: Guide) => guide.siteName)
|
||||
.uniq()
|
||||
}
|
||||
|
||||
getStreams(): Collection {
|
||||
let streams = new Collection()
|
||||
|
||||
this.getFeeds().forEach((feed: Feed) => {
|
||||
streams = streams.concat(feed.getStreams())
|
||||
})
|
||||
|
||||
return streams
|
||||
}
|
||||
|
||||
getStreamTitles(): Collection {
|
||||
return this.getStreams()
|
||||
.map((stream: Stream) => stream.getTitle())
|
||||
.uniq()
|
||||
}
|
||||
|
||||
getFeedFullNames(): Collection {
|
||||
return this.getFeeds()
|
||||
.map((feed: Feed) => feed.getFullName())
|
||||
.uniq()
|
||||
}
|
||||
|
||||
isSFW(): boolean {
|
||||
return this.isNSFW === false
|
||||
}
|
||||
|
||||
getLogos(): Collection {
|
||||
function feed(logo: Logo): number {
|
||||
if (!logo.feed) return 1
|
||||
if (logo.feed.isMain) return 1
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
function format(logo: Logo): number {
|
||||
const levelByFormat = { SVG: 0, PNG: 3, APNG: 1, WebP: 1, AVIF: 1, JPEG: 2, GIF: 1 }
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return this.logos.orderBy([feed, format, size], ['desc', 'desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getLogo(): Logo | undefined {
|
||||
return this.getLogos().first()
|
||||
}
|
||||
|
||||
hasLogo(): boolean {
|
||||
return this.getLogos().notEmpty()
|
||||
}
|
||||
|
||||
getSearchable(): ChannelSearchableData {
|
||||
return {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
altNames: this.altNames.all(),
|
||||
guideNames: this.getGuideNames().all(),
|
||||
streamTitles: this.getStreamTitles().all(),
|
||||
feedFullNames: this.getFeedFullNames().all()
|
||||
}
|
||||
}
|
||||
|
||||
serialize(): ChannelSerializedData {
|
||||
return {
|
||||
id: this.id,
|
||||
name: this.name,
|
||||
altNames: this.altNames.all(),
|
||||
network: this.network,
|
||||
owners: this.owners.all(),
|
||||
countryCode: this.countryCode,
|
||||
country: this.country ? this.country.serialize() : undefined,
|
||||
subdivisionCode: this.subdivisionCode,
|
||||
subdivision: this.subdivision ? this.subdivision.serialize() : undefined,
|
||||
cityName: this.cityName,
|
||||
categoryIds: this.categoryIds.all(),
|
||||
categories: this.categories.map((category: Category) => category.serialize()).all(),
|
||||
isNSFW: this.isNSFW,
|
||||
launched: this.launched,
|
||||
closed: this.closed,
|
||||
replacedBy: this.replacedBy,
|
||||
website: this.website
|
||||
}
|
||||
}
|
||||
|
||||
deserialize(data: ChannelSerializedData): this {
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.altNames = new Collection(data.altNames)
|
||||
this.network = data.network
|
||||
this.owners = new Collection(data.owners)
|
||||
this.countryCode = data.countryCode
|
||||
this.country = data.country ? new Country().deserialize(data.country) : undefined
|
||||
this.subdivisionCode = data.subdivisionCode
|
||||
this.cityName = data.cityName
|
||||
this.categoryIds = new Collection(data.categoryIds)
|
||||
this.isNSFW = data.isNSFW
|
||||
this.launched = data.launched
|
||||
this.closed = data.closed
|
||||
this.replacedBy = data.replacedBy
|
||||
this.website = data.website
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
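For reference, the removed getLogos() orders candidates by three keys: channel-level or main-feed logos first, then by format preference (PNG over JPEG over the animated and web formats, with SVG last), then by how close the image is to 512x512. A plain-array equivalent of that ordering, assuming a stripped-down logo shape:

// LogoLike is an assumption used only for this sketch; the project's real class is the Logo model below.
type LogoLike = { isMainOrChannelLevel: boolean; format?: string; width: number; height: number }

const levelByFormat: Record<string, number> = { SVG: 0, PNG: 3, APNG: 1, WebP: 1, AVIF: 1, JPEG: 2, GIF: 1 }

function rankLogos(logos: LogoLike[]): LogoLike[] {
  const feedScore = (logo: LogoLike) => (logo.isMainOrChannelLevel ? 1 : 0)
  const formatScore = (logo: LogoLike) => (logo.format ? levelByFormat[logo.format] ?? 0 : 0)
  const sizeScore = (logo: LogoLike) => Math.abs(512 - logo.width) + Math.abs(512 - logo.height)

  return [...logos].sort(
    (a, b) =>
      feedScore(b) - feedScore(a) ||     // desc: channel-level / main-feed logos first
      formatScore(b) - formatScore(a) || // desc: preferred formats first
      sizeScore(a) - sizeScore(b)        // asc: closest to 512x512 first
  )
}
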
@@ -1,78 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Country, Region, Subdivision } from '.'
|
||||
import type { CityData, CitySerializedData } from '../types/city'
|
||||
|
||||
export class City {
|
||||
code: string
|
||||
name: string
|
||||
countryCode: string
|
||||
country?: Country
|
||||
subdivisionCode?: string
|
||||
subdivision?: Subdivision
|
||||
wikidataId: string
|
||||
regions?: Collection
|
||||
|
||||
constructor(data?: CityData) {
|
||||
if (!data) return
|
||||
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.countryCode = data.country
|
||||
this.subdivisionCode = data.subdivision || undefined
|
||||
this.wikidataId = data.wikidata_id
|
||||
}
|
||||
|
||||
withCountry(countriesKeyByCode: Dictionary): this {
|
||||
this.country = countriesKeyByCode.get(this.countryCode)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withSubdivision(subdivisionsKeyByCode: Dictionary): this {
|
||||
if (!this.subdivisionCode) return this
|
||||
|
||||
this.subdivision = subdivisionsKeyByCode.get(this.subdivisionCode)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withRegions(regions: Collection): this {
|
||||
this.regions = regions.filter((region: Region) =>
|
||||
region.countryCodes.includes(this.countryCode)
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getRegions(): Collection {
|
||||
if (!this.regions) return new Collection()
|
||||
|
||||
return this.regions
|
||||
}
|
||||
|
||||
serialize(): CitySerializedData {
|
||||
return {
|
||||
code: this.code,
|
||||
name: this.name,
|
||||
countryCode: this.countryCode,
|
||||
country: this.country ? this.country.serialize() : undefined,
|
||||
subdivisionCode: this.subdivisionCode || null,
|
||||
subdivision: this.subdivision ? this.subdivision.serialize() : undefined,
|
||||
wikidataId: this.wikidataId
|
||||
}
|
||||
}
|
||||
|
||||
deserialize(data: CitySerializedData): this {
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.countryCode = data.countryCode
|
||||
this.country = data.country ? new Country().deserialize(data.country) : undefined
|
||||
this.subdivisionCode = data.subdivisionCode || undefined
|
||||
this.subdivision = data.subdivision
|
||||
? new Subdivision().deserialize(data.subdivision)
|
||||
: undefined
|
||||
this.wikidataId = data.wikidataId
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Region, Language, Subdivision } from '.'
|
||||
import type { CountryData, CountrySerializedData } from '../types/country'
|
||||
import { SubdivisionSerializedData } from '../types/subdivision'
|
||||
import { RegionSerializedData } from '../types/region'
|
||||
|
||||
export class Country {
|
||||
code: string
|
||||
name: string
|
||||
flag: string
|
||||
languageCode: string
|
||||
language?: Language
|
||||
subdivisions?: Collection
|
||||
regions?: Collection
|
||||
cities?: Collection
|
||||
|
||||
constructor(data?: CountryData) {
|
||||
if (!data) return
|
||||
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.flag = data.flag
|
||||
this.languageCode = data.lang
|
||||
}
|
||||
|
||||
withSubdivisions(subdivisionsGroupedByCountryCode: Dictionary): this {
|
||||
this.subdivisions = new Collection(subdivisionsGroupedByCountryCode.get(this.code))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withRegions(regions: Collection): this {
|
||||
this.regions = regions.filter((region: Region) => region.includesCountryCode(this.code))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withCities(citiesGroupedByCountryCode: Dictionary): this {
|
||||
this.cities = new Collection(citiesGroupedByCountryCode.get(this.code))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLanguage(languagesKeyByCode: Dictionary): this {
|
||||
this.language = languagesKeyByCode.get(this.languageCode)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getLanguage(): Language | undefined {
|
||||
return this.language
|
||||
}
|
||||
|
||||
getRegions(): Collection {
|
||||
return this.regions || new Collection()
|
||||
}
|
||||
|
||||
getSubdivisions(): Collection {
|
||||
return this.subdivisions || new Collection()
|
||||
}
|
||||
|
||||
getCities(): Collection {
|
||||
return this.cities || new Collection()
|
||||
}
|
||||
|
||||
serialize(): CountrySerializedData {
|
||||
return {
|
||||
code: this.code,
|
||||
name: this.name,
|
||||
flag: this.flag,
|
||||
languageCode: this.languageCode,
|
||||
language: this.language ? this.language.serialize() : null,
|
||||
subdivisions: this.subdivisions
|
||||
? this.subdivisions.map((subdivision: Subdivision) => subdivision.serialize()).all()
|
||||
: [],
|
||||
regions: this.regions ? this.regions.map((region: Region) => region.serialize()).all() : []
|
||||
}
|
||||
}
|
||||
|
||||
deserialize(data: CountrySerializedData): this {
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.flag = data.flag
|
||||
this.languageCode = data.languageCode
|
||||
this.language = data.language ? new Language().deserialize(data.language) : undefined
|
||||
this.subdivisions = new Collection(data.subdivisions).map((data: SubdivisionSerializedData) =>
|
||||
new Subdivision().deserialize(data)
|
||||
)
|
||||
this.regions = new Collection(data.regions).map((data: RegionSerializedData) =>
|
||||
new Region().deserialize(data)
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
@@ -1,170 +0,0 @@
|
||||
import { Country, Language, Region, Channel, Subdivision, BroadcastArea, City } from './index'
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import type { FeedData } from '../types/feed'
|
||||
|
||||
export class Feed {
|
||||
channelId: string
|
||||
channel?: Channel
|
||||
id: string
|
||||
name: string
|
||||
isMain: boolean
|
||||
broadcastAreaCodes: Collection
|
||||
broadcastArea?: BroadcastArea
|
||||
languageCodes: Collection
|
||||
languages?: Collection
|
||||
timezoneIds: Collection
|
||||
timezones?: Collection
|
||||
videoFormat: string
|
||||
guides?: Collection
|
||||
streams?: Collection
|
||||
|
||||
constructor(data: FeedData) {
|
||||
this.channelId = data.channel
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.isMain = data.is_main
|
||||
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||
this.languageCodes = new Collection(data.languages)
|
||||
this.timezoneIds = new Collection(data.timezones)
|
||||
this.videoFormat = data.video_format
|
||||
}
|
||||
|
||||
withChannel(channelsKeyById: Dictionary): this {
|
||||
this.channel = channelsKeyById.get(this.channelId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withStreams(streamsGroupedById: Dictionary): this {
|
||||
this.streams = new Collection(streamsGroupedById.get(`${this.channelId}@${this.id}`))
|
||||
|
||||
if (this.isMain) {
|
||||
this.streams = this.streams.concat(new Collection(streamsGroupedById.get(this.channelId)))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withGuides(guidesGroupedByStreamId: Dictionary): this {
|
||||
this.guides = new Collection(guidesGroupedByStreamId.get(`${this.channelId}@${this.id}`))
|
||||
|
||||
if (this.isMain) {
|
||||
this.guides = this.guides.concat(new Collection(guidesGroupedByStreamId.get(this.channelId)))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLanguages(languagesKeyByCode: Dictionary): this {
|
||||
this.languages = this.languageCodes
|
||||
.map((code: string) => languagesKeyByCode.get(code))
|
||||
.filter(Boolean)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withTimezones(timezonesKeyById: Dictionary): this {
|
||||
this.timezones = this.timezoneIds.map((id: string) => timezonesKeyById.get(id)).filter(Boolean)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withBroadcastArea(
|
||||
citiesKeyByCode: Dictionary,
|
||||
subdivisionsKeyByCode: Dictionary,
|
||||
countriesKeyByCode: Dictionary,
|
||||
regionsKeyByCode: Dictionary
|
||||
): this {
|
||||
this.broadcastArea = new BroadcastArea(this.broadcastAreaCodes).withLocations(
|
||||
citiesKeyByCode,
|
||||
subdivisionsKeyByCode,
|
||||
countriesKeyByCode,
|
||||
regionsKeyByCode
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
hasBroadcastArea(): boolean {
|
||||
return !!this.broadcastArea
|
||||
}
|
||||
|
||||
getBroadcastCountries(): Collection {
|
||||
if (!this.broadcastArea) return new Collection()
|
||||
|
||||
return this.broadcastArea.getCountries()
|
||||
}
|
||||
|
||||
getBroadcastRegions(): Collection {
|
||||
if (!this.broadcastArea) return new Collection()
|
||||
|
||||
return this.broadcastArea.getRegions()
|
||||
}
|
||||
|
||||
getTimezones(): Collection {
|
||||
return this.timezones || new Collection()
|
||||
}
|
||||
|
||||
getLanguages(): Collection {
|
||||
return this.languages || new Collection()
|
||||
}
|
||||
|
||||
hasLanguages(): boolean {
|
||||
return !!this.languages && this.languages.notEmpty()
|
||||
}
|
||||
|
||||
hasLanguage(language: Language): boolean {
|
||||
return (
|
||||
!!this.languages &&
|
||||
this.languages.includes((_language: Language) => _language.code === language.code)
|
||||
)
|
||||
}
|
||||
|
||||
isBroadcastInCity(city: City): boolean {
|
||||
if (!this.broadcastArea) return false
|
||||
|
||||
return this.broadcastArea.includesCity(city)
|
||||
}
|
||||
|
||||
isBroadcastInSubdivision(subdivision: Subdivision): boolean {
|
||||
if (!this.broadcastArea) return false
|
||||
|
||||
return this.broadcastArea.includesSubdivision(subdivision)
|
||||
}
|
||||
|
||||
isBroadcastInCountry(country: Country): boolean {
|
||||
if (!this.broadcastArea) return false
|
||||
|
||||
return this.broadcastArea.includesCountry(country)
|
||||
}
|
||||
|
||||
isBroadcastInRegion(region: Region): boolean {
|
||||
if (!this.broadcastArea) return false
|
||||
|
||||
return this.broadcastArea.includesRegion(region)
|
||||
}
|
||||
|
||||
isInternational(): boolean {
|
||||
if (!this.broadcastArea) return false
|
||||
|
||||
return this.broadcastArea.codes.join(',').includes('r/')
|
||||
}
|
||||
|
||||
getGuides(): Collection {
|
||||
if (!this.guides) return new Collection()
|
||||
|
||||
return this.guides
|
||||
}
|
||||
|
||||
getStreams(): Collection {
|
||||
if (!this.streams) return new Collection()
|
||||
|
||||
return this.streams
|
||||
}
|
||||
|
||||
getFullName(): string {
|
||||
if (!this.channel) return ''
|
||||
|
||||
return `${this.channel.name} ${this.name}`
|
||||
}
|
||||
}
|
||||
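The withStreams()/withGuides() helpers above rely on a shared key convention: records addressed to a specific feed are grouped under channelId@feedId, while records carrying only a channel id are folded into that channel's main feed. A small sketch of the key side of that convention (the ids are made up):

// Illustrative ids only; the real data uses ids from the iptv-org database.
function streamKey(channelId: string, feedId?: string): string {
  return feedId ? `${channelId}@${feedId}` : channelId
}

streamKey('ExampleTV.us', 'SD') // 'ExampleTV.us@SD' -> grouped under that exact feed
streamKey('ExampleTV.us')       // 'ExampleTV.us'    -> picked up by the channel's main feed
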
@@ -1,54 +0,0 @@
|
||||
import type { GuideData, GuideSerializedData } from '../types/guide'
|
||||
|
||||
export class Guide {
|
||||
channelId?: string
|
||||
feedId?: string
|
||||
siteDomain: string
|
||||
siteId: string
|
||||
siteName: string
|
||||
languageCode: string
|
||||
|
||||
constructor(data?: GuideData) {
|
||||
if (!data) return
|
||||
|
||||
this.channelId = data.channel
|
||||
this.feedId = data.feed
|
||||
this.siteDomain = data.site
|
||||
this.siteId = data.site_id
|
||||
this.siteName = data.site_name
|
||||
this.languageCode = data.lang
|
||||
}
|
||||
|
||||
getUUID(): string {
|
||||
return this.getStreamId() + this.siteId
|
||||
}
|
||||
|
||||
getStreamId(): string | undefined {
|
||||
if (!this.channelId) return undefined
|
||||
if (!this.feedId) return this.channelId
|
||||
|
||||
return `${this.channelId}@${this.feedId}`
|
||||
}
|
||||
|
||||
serialize(): GuideSerializedData {
|
||||
return {
|
||||
channelId: this.channelId,
|
||||
feedId: this.feedId,
|
||||
siteDomain: this.siteDomain,
|
||||
siteId: this.siteId,
|
||||
siteName: this.siteName,
|
||||
languageCode: this.languageCode
|
||||
}
|
||||
}
|
||||
|
||||
deserialize(data: GuideSerializedData): this {
|
||||
this.channelId = data.channelId
|
||||
this.feedId = data.feedId
|
||||
this.siteDomain = data.siteDomain
|
||||
this.siteId = data.siteId
|
||||
this.siteName = data.siteName
|
||||
this.languageCode = data.languageCode
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
@@ -1,16 +1,3 @@
export * from './blocklistRecord'
export * from './broadcastArea'
export * from './category'
export * from './channel'
export * from './city'
export * from './country'
export * from './feed'
export * from './guide'
export * from './issue'
export * from './language'
export * from './logo'
export * from './playlist'
export * from './region'
export * from './stream'
export * from './subdivision'
export * from './timezone'

export * from './issue'
export * from './playlist'
export * from './stream'

@@ -1,19 +1,19 @@
|
||||
import { IssueData } from '../core'
|
||||
|
||||
type IssueProps = {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: IssueData
|
||||
}
|
||||
|
||||
export class Issue {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: IssueData
|
||||
|
||||
constructor({ number, labels, data }: IssueProps) {
|
||||
this.number = number
|
||||
this.labels = labels
|
||||
this.data = data
|
||||
}
|
||||
}
|
||||
import { IssueData } from '../core'
|
||||
|
||||
type IssueProps = {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: IssueData
|
||||
}
|
||||
|
||||
export class Issue {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: IssueData
|
||||
|
||||
constructor({ number, labels, data }: IssueProps) {
|
||||
this.number = number
|
||||
this.labels = labels
|
||||
this.data = data
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,27 +0,0 @@
import type { LanguageData, LanguageSerializedData } from '../types/language'

export class Language {
  code: string
  name: string

  constructor(data?: LanguageData) {
    if (!data) return

    this.code = data.code
    this.name = data.name
  }

  serialize(): LanguageSerializedData {
    return {
      code: this.code,
      name: this.name
    }
  }

  deserialize(data: LanguageSerializedData): this {
    this.code = data.code
    this.name = data.name

    return this
  }
}

@@ -1,40 +0,0 @@
|
||||
import { Collection, type Dictionary } from '@freearhey/core'
|
||||
import type { LogoData } from '../types/logo'
|
||||
import { type Feed } from './feed'
|
||||
|
||||
export class Logo {
|
||||
channelId: string
|
||||
feedId?: string
|
||||
feed: Feed
|
||||
tags: Collection
|
||||
width: number
|
||||
height: number
|
||||
format?: string
|
||||
url: string
|
||||
|
||||
constructor(data?: LogoData) {
|
||||
if (!data) return
|
||||
|
||||
this.channelId = data.channel
|
||||
this.feedId = data.feed || undefined
|
||||
this.tags = new Collection(data.tags)
|
||||
this.width = data.width
|
||||
this.height = data.height
|
||||
this.format = data.format || undefined
|
||||
this.url = data.url
|
||||
}
|
||||
|
||||
withFeed(feedsKeyById: Dictionary): this {
|
||||
if (!this.feedId) return this
|
||||
|
||||
this.feed = feedsKeyById.get(this.feedId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
if (!this.feedId) return this.channelId
|
||||
|
||||
return `${this.channelId}@${this.feedId}`
|
||||
}
|
||||
}
|
||||
@@ -1,28 +1,28 @@
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Stream } from '../models'
|
||||
|
||||
type PlaylistOptions = {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
export class Playlist {
|
||||
streams: Collection
|
||||
options: {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
constructor(streams: Collection, options?: PlaylistOptions) {
|
||||
this.streams = streams
|
||||
this.options = options || { public: false }
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '#EXTM3U\r\n'
|
||||
|
||||
this.streams.forEach((stream: Stream) => {
|
||||
output += stream.toString(this.options) + '\r\n'
|
||||
})
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Stream } from '../models'
|
||||
|
||||
type PlaylistOptions = {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
export class Playlist {
|
||||
streams: Collection<Stream>
|
||||
options: {
|
||||
public: boolean
|
||||
}
|
||||
|
||||
constructor(streams: Collection<Stream>, options?: PlaylistOptions) {
|
||||
this.streams = streams
|
||||
this.options = options || { public: false }
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '#EXTM3U\r\n'
|
||||
|
||||
this.streams.forEach((stream: Stream) => {
|
||||
output += stream.toString(this.options) + '\r\n'
|
||||
})
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
|
||||
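Given the Stream.toString() implementation further down in this patch, a public playlist built by Playlist.toString() comes out roughly like the snippet below; the stream, logo URL, and group title are placeholders, not real catalog entries:

// Hedged sketch of the expected output; exampleStream stands in for a prepared Stream instance.
const playlist = new Playlist(new Collection<Stream>([exampleStream]), { public: true })
playlist.toString()
// '#EXTM3U\r\n' +
// '#EXTINF:-1 tvg-id="ExampleTV.us@SD" tvg-logo="https://example.com/logo.png" group-title="News",Example TV SD (480p)\r\n' +
// 'https://example.com/stream.m3u8\r\n'
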
@@ -1,118 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { City, Country, Subdivision } from '.'
|
||||
import type { RegionData, RegionSerializedData } from '../types/region'
|
||||
import { CountrySerializedData } from '../types/country'
|
||||
import { SubdivisionSerializedData } from '../types/subdivision'
|
||||
import { CitySerializedData } from '../types/city'
|
||||
|
||||
export class Region {
|
||||
code: string
|
||||
name: string
|
||||
countryCodes: Collection
|
||||
countries?: Collection
|
||||
subdivisions?: Collection
|
||||
cities?: Collection
|
||||
regions?: Collection
|
||||
|
||||
constructor(data?: RegionData) {
|
||||
if (!data) return
|
||||
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.countryCodes = new Collection(data.countries)
|
||||
}
|
||||
|
||||
withCountries(countriesKeyByCode: Dictionary): this {
|
||||
this.countries = this.countryCodes.map((code: string) => countriesKeyByCode.get(code))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withSubdivisions(subdivisions: Collection): this {
|
||||
this.subdivisions = subdivisions.filter(
|
||||
(subdivision: Subdivision) => this.countryCodes.indexOf(subdivision.countryCode) > -1
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withCities(cities: Collection): this {
|
||||
this.cities = cities.filter((city: City) => this.countryCodes.indexOf(city.countryCode) > -1)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withRegions(regions: Collection): this {
|
||||
this.regions = regions.filter(
|
||||
(region: Region) => !region.countryCodes.intersects(this.countryCodes).isEmpty()
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getSubdivisions(): Collection {
|
||||
if (!this.subdivisions) return new Collection()
|
||||
|
||||
return this.subdivisions
|
||||
}
|
||||
|
||||
getCountries(): Collection {
|
||||
if (!this.countries) return new Collection()
|
||||
|
||||
return this.countries
|
||||
}
|
||||
|
||||
getCities(): Collection {
|
||||
if (!this.cities) return new Collection()
|
||||
|
||||
return this.cities
|
||||
}
|
||||
|
||||
getRegions(): Collection {
|
||||
if (!this.regions) return new Collection()
|
||||
|
||||
return this.regions
|
||||
}
|
||||
|
||||
includesCountryCode(code: string): boolean {
|
||||
return this.countryCodes.includes((countryCode: string) => countryCode === code)
|
||||
}
|
||||
|
||||
isWorldwide(): boolean {
|
||||
return ['INT', 'WW'].includes(this.code)
|
||||
}
|
||||
|
||||
serialize(): RegionSerializedData {
|
||||
return {
|
||||
code: this.code,
|
||||
name: this.name,
|
||||
countryCodes: this.countryCodes.all(),
|
||||
countries: this.getCountries()
|
||||
.map((country: Country) => country.serialize())
|
||||
.all(),
|
||||
subdivisions: this.getSubdivisions()
|
||||
.map((subdivision: Subdivision) => subdivision.serialize())
|
||||
.all(),
|
||||
cities: this.getCities()
|
||||
.map((city: City) => city.serialize())
|
||||
.all()
|
||||
}
|
||||
}
|
||||
|
||||
deserialize(data: RegionSerializedData): this {
|
||||
this.code = data.code
|
||||
this.name = data.name
|
||||
this.countryCodes = new Collection(data.countryCodes)
|
||||
this.countries = new Collection(data.countries).map((data: CountrySerializedData) =>
|
||||
new Country().deserialize(data)
|
||||
)
|
||||
this.subdivisions = new Collection(data.subdivisions).map((data: SubdivisionSerializedData) =>
|
||||
new Subdivision().deserialize(data)
|
||||
)
|
||||
this.cities = new Collection(data.cities).map((data: CitySerializedData) =>
|
||||
new City().deserialize(data)
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
}
|
||||
@@ -1,474 +1,461 @@
|
||||
import {
|
||||
Feed,
|
||||
Channel,
|
||||
Category,
|
||||
Region,
|
||||
Subdivision,
|
||||
Country,
|
||||
Language,
|
||||
Logo,
|
||||
City
|
||||
} from './index'
|
||||
import { URL, Collection, Dictionary } from '@freearhey/core'
|
||||
import type { StreamData } from '../types/stream'
|
||||
import parser from 'iptv-playlist-parser'
|
||||
import { IssueData } from '../core'
|
||||
import path from 'node:path'
|
||||
|
||||
export class Stream {
|
||||
title: string
|
||||
url: string
|
||||
id?: string
|
||||
channelId?: string
|
||||
channel?: Channel
|
||||
feedId?: string
|
||||
feed?: Feed
|
||||
logos: Collection = new Collection()
|
||||
filepath?: string
|
||||
line?: number
|
||||
label?: string
|
||||
verticalResolution?: number
|
||||
isInterlaced?: boolean
|
||||
referrer?: string
|
||||
userAgent?: string
|
||||
groupTitle: string = 'Undefined'
|
||||
removed: boolean = false
|
||||
directives: Collection = new Collection()
|
||||
|
||||
constructor(data?: StreamData) {
|
||||
if (!data) return
|
||||
|
||||
const id =
|
||||
data.channelId && data.feedId ? [data.channelId, data.feedId].join('@') : data.channelId
|
||||
const { verticalResolution, isInterlaced } = parseQuality(data.quality)
|
||||
|
||||
this.id = id || undefined
|
||||
this.channelId = data.channelId || undefined
|
||||
this.feedId = data.feedId || undefined
|
||||
this.title = data.title || ''
|
||||
this.url = data.url
|
||||
this.referrer = data.referrer || undefined
|
||||
this.userAgent = data.userAgent || undefined
|
||||
this.verticalResolution = verticalResolution || undefined
|
||||
this.isInterlaced = isInterlaced || undefined
|
||||
this.label = data.label || undefined
|
||||
this.directives = new Collection(data.directives)
|
||||
}
|
||||
|
||||
update(issueData: IssueData): this {
|
||||
const data = {
|
||||
label: issueData.getString('label'),
|
||||
quality: issueData.getString('quality'),
|
||||
httpUserAgent: issueData.getString('httpUserAgent'),
|
||||
httpReferrer: issueData.getString('httpReferrer'),
|
||||
newStreamUrl: issueData.getString('newStreamUrl'),
|
||||
directives: issueData.getArray('directives')
|
||||
}
|
||||
|
||||
if (data.label !== undefined) this.label = data.label
|
||||
if (data.quality !== undefined) this.setQuality(data.quality)
|
||||
if (data.httpUserAgent !== undefined) this.userAgent = data.httpUserAgent
|
||||
if (data.httpReferrer !== undefined) this.referrer = data.httpReferrer
|
||||
if (data.newStreamUrl !== undefined) this.url = data.newStreamUrl
|
||||
if (data.directives !== undefined) this.directives = new Collection(data.directives)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
fromPlaylistItem(data: parser.PlaylistItem): this {
|
||||
function parseName(name: string): {
|
||||
title: string
|
||||
label: string
|
||||
quality: string
|
||||
} {
|
||||
let title = name
|
||||
const [, label] = title.match(/ \[(.*)\]$/) || [null, '']
|
||||
title = title.replace(new RegExp(` \\[${escapeRegExp(label)}\\]$`), '')
|
||||
const [, quality] = title.match(/ \(([0-9]+p)\)$/) || [null, '']
|
||||
title = title.replace(new RegExp(` \\(${quality}\\)$`), '')
|
||||
|
||||
return { title, label, quality }
|
||||
}
|
||||
|
||||
function parseDirectives(string: string) {
|
||||
const directives = new Collection()
|
||||
|
||||
if (!string) return directives
|
||||
|
||||
const supportedDirectives = ['#EXTVLCOPT', '#KODIPROP']
|
||||
const lines = string.split('\r\n')
|
||||
const regex = new RegExp(`^${supportedDirectives.join('|')}`, 'i')
|
||||
|
||||
lines.forEach((line: string) => {
|
||||
if (regex.test(line)) {
|
||||
directives.add(line.trim())
|
||||
}
|
||||
})
|
||||
|
||||
return directives
|
||||
}
|
||||
|
||||
if (!data.name) throw new Error('"name" property is required')
|
||||
if (!data.url) throw new Error('"url" property is required')
|
||||
|
||||
const [channelId, feedId] = data.tvg.id.split('@')
|
||||
const { title, label, quality } = parseName(data.name)
|
||||
const { verticalResolution, isInterlaced } = parseQuality(quality)
|
||||
|
||||
this.id = data.tvg.id || undefined
|
||||
this.feedId = feedId || undefined
|
||||
this.channelId = channelId || undefined
|
||||
this.line = data.line
|
||||
this.label = label || undefined
|
||||
this.title = title
|
||||
this.verticalResolution = verticalResolution || undefined
|
||||
this.isInterlaced = isInterlaced || undefined
|
||||
this.url = data.url
|
||||
this.referrer = data.http.referrer || undefined
|
||||
this.userAgent = data.http['user-agent'] || undefined
|
||||
this.directives = parseDirectives(data.raw)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withChannel(channelsKeyById: Dictionary): this {
|
||||
if (!this.channelId) return this
|
||||
|
||||
this.channel = channelsKeyById.get(this.channelId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withFeed(feedsGroupedByChannelId: Dictionary): this {
|
||||
if (!this.channelId) return this
|
||||
|
||||
const channelFeeds = feedsGroupedByChannelId.get(this.channelId) || []
|
||||
if (this.feedId) this.feed = channelFeeds.find((feed: Feed) => feed.id === this.feedId)
|
||||
if (!this.feedId && !this.feed) this.feed = channelFeeds.find((feed: Feed) => feed.isMain)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLogos(logosGroupedByStreamId: Dictionary): this {
|
||||
if (this.id) this.logos = new Collection(logosGroupedByStreamId.get(this.id))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
setId(id: string): this {
|
||||
this.id = id
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
setChannelId(channelId: string): this {
|
||||
this.channelId = channelId
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
setFeedId(feedId: string | undefined): this {
|
||||
this.feedId = feedId
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
setQuality(quality: string): this {
|
||||
const { verticalResolution, isInterlaced } = parseQuality(quality)
|
||||
|
||||
this.verticalResolution = verticalResolution || undefined
|
||||
this.isInterlaced = isInterlaced || undefined
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getLine(): number {
|
||||
return this.line || -1
|
||||
}
|
||||
|
||||
getFilename(): string {
|
||||
if (!this.filepath) return ''
|
||||
|
||||
return path.basename(this.filepath)
|
||||
}
|
||||
|
||||
setFilepath(filepath: string): this {
|
||||
this.filepath = filepath
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
updateFilepath(): this {
|
||||
if (!this.channel) return this
|
||||
|
||||
this.filepath = `${this.channel.countryCode.toLowerCase()}.m3u`
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getChannelId(): string {
|
||||
return this.channelId || ''
|
||||
}
|
||||
|
||||
getFeedId(): string {
|
||||
if (this.feedId) return this.feedId
|
||||
if (this.feed) return this.feed.id
|
||||
return ''
|
||||
}
|
||||
|
||||
getFilepath(): string {
|
||||
return this.filepath || ''
|
||||
}
|
||||
|
||||
getReferrer(): string {
|
||||
return this.referrer || ''
|
||||
}
|
||||
|
||||
getUserAgent(): string {
|
||||
return this.userAgent || ''
|
||||
}
|
||||
|
||||
getQuality(): string {
|
||||
if (!this.verticalResolution) return ''
|
||||
|
||||
let quality = this.verticalResolution.toString()
|
||||
|
||||
if (this.isInterlaced) quality += 'i'
|
||||
else quality += 'p'
|
||||
|
||||
return quality
|
||||
}
|
||||
|
||||
hasId(): boolean {
|
||||
return !!this.id
|
||||
}
|
||||
|
||||
hasQuality(): boolean {
|
||||
return !!this.verticalResolution
|
||||
}
|
||||
|
||||
getVerticalResolution(): number {
|
||||
if (!this.hasQuality()) return 0
|
||||
|
||||
return parseInt(this.getQuality().replace(/p|i/, ''))
|
||||
}
|
||||
|
||||
updateTitle(): this {
|
||||
if (!this.channel) return this
|
||||
|
||||
this.title = this.channel.name
|
||||
if (this.feed && !this.feed.isMain) {
|
||||
this.title += ` ${this.feed.name}`
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
updateId(): this {
|
||||
if (!this.channel) return this
|
||||
if (this.feed) {
|
||||
this.id = `${this.channel.id}@${this.feed.id}`
|
||||
} else {
|
||||
this.id = this.channel.id
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
normalizeURL() {
|
||||
const url = new URL(this.url)
|
||||
|
||||
this.url = url.normalize().toString()
|
||||
}
|
||||
|
||||
clone(): Stream {
|
||||
return Object.assign(Object.create(Object.getPrototypeOf(this)), this)
|
||||
}
|
||||
|
||||
hasChannel() {
|
||||
return !!this.channel
|
||||
}
|
||||
|
||||
getBroadcastRegions(): Collection {
|
||||
return this.feed ? this.feed.getBroadcastRegions() : new Collection()
|
||||
}
|
||||
|
||||
getBroadcastCountries(): Collection {
|
||||
return this.feed ? this.feed.getBroadcastCountries() : new Collection()
|
||||
}
|
||||
|
||||
hasBroadcastArea(): boolean {
|
||||
return this.feed ? this.feed.hasBroadcastArea() : false
|
||||
}
|
||||
|
||||
isSFW(): boolean {
|
||||
return this.channel ? this.channel.isSFW() : true
|
||||
}
|
||||
|
||||
hasCategories(): boolean {
|
||||
return this.channel ? this.channel.hasCategories() : false
|
||||
}
|
||||
|
||||
hasCategory(category: Category): boolean {
|
||||
return this.channel ? this.channel.hasCategory(category) : false
|
||||
}
|
||||
|
||||
getCategoryNames(): string[] {
|
||||
return this.getCategories()
|
||||
.map((category: Category) => category.name)
|
||||
.sort()
|
||||
.all()
|
||||
}
|
||||
|
||||
getCategories(): Collection {
|
||||
return this.channel ? this.channel.getCategories() : new Collection()
|
||||
}
|
||||
|
||||
getLanguages(): Collection {
|
||||
return this.feed ? this.feed.getLanguages() : new Collection()
|
||||
}
|
||||
|
||||
hasLanguages() {
|
||||
return this.feed ? this.feed.hasLanguages() : false
|
||||
}
|
||||
|
||||
hasLanguage(language: Language) {
|
||||
return this.feed ? this.feed.hasLanguage(language) : false
|
||||
}
|
||||
|
||||
getBroadcastAreaCodes(): Collection {
|
||||
return this.feed ? this.feed.broadcastAreaCodes : new Collection()
|
||||
}
|
||||
|
||||
isBroadcastInCity(city: City): boolean {
|
||||
return this.feed ? this.feed.isBroadcastInCity(city) : false
|
||||
}
|
||||
|
||||
isBroadcastInSubdivision(subdivision: Subdivision): boolean {
|
||||
return this.feed ? this.feed.isBroadcastInSubdivision(subdivision) : false
|
||||
}
|
||||
|
||||
isBroadcastInCountry(country: Country): boolean {
|
||||
return this.feed ? this.feed.isBroadcastInCountry(country) : false
|
||||
}
|
||||
|
||||
isBroadcastInRegion(region: Region): boolean {
|
||||
return this.feed ? this.feed.isBroadcastInRegion(region) : false
|
||||
}
|
||||
|
||||
isInternational(): boolean {
|
||||
return this.feed ? this.feed.isInternational() : false
|
||||
}
|
||||
|
||||
getLogos(): Collection {
|
||||
function format(logo: Logo): number {
|
||||
const levelByFormat = { SVG: 0, PNG: 3, APNG: 1, WebP: 1, AVIF: 1, JPEG: 2, GIF: 1 }
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return this.logos.orderBy([format, size], ['desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getLogo(): Logo | undefined {
|
||||
return this.getLogos().first()
|
||||
}
|
||||
|
||||
hasLogo(): boolean {
|
||||
return this.getLogos().notEmpty()
|
||||
}
|
||||
|
||||
getLogoUrl(): string {
|
||||
let logo: Logo | undefined
|
||||
|
||||
if (this.hasLogo()) logo = this.getLogo()
|
||||
else logo = this?.channel?.getLogo()
|
||||
|
||||
return logo ? logo.url : ''
|
||||
}
|
||||
|
||||
getTitle(): string {
|
||||
return this.title || ''
|
||||
}
|
||||
|
||||
getFullTitle(): string {
|
||||
let title = `${this.getTitle()}`
|
||||
|
||||
if (this.getQuality()) {
|
||||
title += ` (${this.getQuality()})`
|
||||
}
|
||||
|
||||
if (this.label) {
|
||||
title += ` [${this.label}]`
|
||||
}
|
||||
|
||||
return title
|
||||
}
|
||||
|
||||
getLabel(): string {
|
||||
return this.label || ''
|
||||
}
|
||||
|
||||
getId(): string {
|
||||
return this.id || ''
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return {
|
||||
channel: this.channelId || null,
|
||||
feed: this.feedId || null,
|
||||
title: this.title,
|
||||
url: this.url,
|
||||
referrer: this.referrer || null,
|
||||
user_agent: this.userAgent || null,
|
||||
quality: this.getQuality() || null
|
||||
}
|
||||
}
|
||||
|
||||
toString(options: { public: boolean }) {
|
||||
let output = `#EXTINF:-1 tvg-id="${this.getId()}"`
|
||||
|
||||
if (options.public) {
|
||||
output += ` tvg-logo="${this.getLogoUrl()}" group-title="${this.groupTitle}"`
|
||||
}
|
||||
|
||||
if (this.referrer) {
|
||||
output += ` http-referrer="${this.referrer}"`
|
||||
}
|
||||
|
||||
if (this.userAgent) {
|
||||
output += ` http-user-agent="${this.userAgent}"`
|
||||
}
|
||||
|
||||
output += `,${this.getFullTitle()}`
|
||||
|
||||
this.directives.forEach((prop: string) => {
|
||||
output += `\r\n${prop}`
|
||||
})
|
||||
|
||||
output += `\r\n${this.url}`
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
|
||||
function escapeRegExp(text) {
|
||||
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
|
||||
}
|
||||
|
||||
function parseQuality(quality: string | null): {
|
||||
verticalResolution: number | null
|
||||
isInterlaced: boolean | null
|
||||
} {
|
||||
if (!quality) return { verticalResolution: null, isInterlaced: null }
|
||||
const [, verticalResolutionString] = quality.match(/^(\d+)/) || [null, undefined]
|
||||
const isInterlaced = /i$/i.test(quality)
|
||||
let verticalResolution = 0
|
||||
if (verticalResolutionString) verticalResolution = parseInt(verticalResolutionString)
|
||||
|
||||
return { verticalResolution, isInterlaced }
|
||||
}
|
||||
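For orientation before the rewritten model: the removed parseQuality() helper maps a quality string to a vertical resolution plus an interlaced flag. A few hedged examples of what it returns, read off the regexes above rather than taken from a test run:

parseQuality('1080p') // { verticalResolution: 1080, isInterlaced: false }
parseQuality('576i')  // { verticalResolution: 576, isInterlaced: true }
parseQuality(null)    // { verticalResolution: null, isInterlaced: null }
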
import { Collection } from '@freearhey/core'
|
||||
import parser from 'iptv-playlist-parser'
|
||||
import { normalizeURL } from '../utils'
|
||||
import * as sdk from '@iptv-org/sdk'
|
||||
import { IssueData } from '../core'
|
||||
import { data } from '../api'
|
||||
import path from 'node:path'
|
||||
|
||||
export class Stream extends sdk.Models.Stream {
|
||||
directives: Collection<string>
|
||||
filepath?: string
|
||||
line?: number
|
||||
groupTitle: string = 'Undefined'
|
||||
removed: boolean = false
|
||||
tvgId?: string
|
||||
label: string | null
|
||||
|
||||
updateWithIssue(issueData: IssueData): this {
|
||||
const data = {
|
||||
label: issueData.getString('label'),
|
||||
quality: issueData.getString('quality'),
|
||||
httpUserAgent: issueData.getString('httpUserAgent'),
|
||||
httpReferrer: issueData.getString('httpReferrer'),
|
||||
newStreamUrl: issueData.getString('newStreamUrl'),
|
||||
directives: issueData.getArray('directives')
|
||||
}
|
||||
|
||||
if (data.label !== undefined) this.label = data.label
|
||||
if (data.quality !== undefined) this.quality = data.quality
|
||||
if (data.httpUserAgent !== undefined) this.user_agent = data.httpUserAgent
|
||||
if (data.httpReferrer !== undefined) this.referrer = data.httpReferrer
|
||||
if (data.newStreamUrl !== undefined) this.url = data.newStreamUrl
|
||||
if (data.directives !== undefined) this.setDirectives(data.directives)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
static fromPlaylistItem(data: parser.PlaylistItem): Stream {
|
||||
function escapeRegExp(text: string): string {
|
||||
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
|
||||
}
|
||||
|
||||
function parseName(name: string): {
|
||||
title: string
|
||||
label: string
|
||||
quality: string
|
||||
} {
|
||||
let title = name
|
||||
const [, label] = title.match(/ \[(.*)\]$/) || [null, '']
|
||||
title = title.replace(new RegExp(` \\[${escapeRegExp(label)}\\]$`), '')
|
||||
const [, quality] = title.match(/ \(([0-9]+[pi])\)$/) || [null, '']
|
||||
title = title.replace(new RegExp(` \\(${quality}\\)$`), '')
|
||||
|
||||
return { title, label, quality }
|
||||
}
|
||||
|
||||
function parseDirectives(string: string): Collection<string> {
|
||||
const directives = new Collection<string>()
|
||||
|
||||
if (!string) return directives
|
||||
|
||||
const supportedDirectives = ['#EXTVLCOPT', '#KODIPROP']
|
||||
const lines = string.split('\r\n')
|
||||
const regex = new RegExp(`^${supportedDirectives.join('|')}`, 'i')
|
||||
|
||||
lines.forEach((line: string) => {
|
||||
if (regex.test(line)) {
|
||||
directives.add(line.trim())
|
||||
}
|
||||
})
|
||||
|
||||
return directives
|
||||
}
|
||||
|
||||
if (!data.name) throw new Error('"name" property is required')
|
||||
if (!data.url) throw new Error('"url" property is required')
|
||||
|
||||
const [channelId, feedId] = data.tvg.id.split('@')
|
||||
const { title, label, quality } = parseName(data.name)
|
||||
|
||||
const stream = new Stream({
|
||||
channel: channelId || null,
|
||||
feed: feedId || null,
|
||||
title: title,
|
||||
quality: quality || null,
|
||||
url: data.url,
|
||||
referrer: data.http.referrer || null,
|
||||
user_agent: data.http['user-agent'] || null
|
||||
})
|
||||
|
||||
stream.tvgId = data.tvg.id
|
||||
stream.line = data.line
|
||||
stream.label = label || null
|
||||
stream.directives = parseDirectives(data.raw)
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
isSFW(): boolean {
|
||||
const channel = this.getChannel()
|
||||
|
||||
if (!channel) return true
|
||||
|
||||
return !channel.is_nsfw
|
||||
}
|
||||
|
||||
getUniqKey(): string {
|
||||
const filepath = this.getFilepath()
|
||||
const tvgId = this.getTvgId()
|
||||
|
||||
return filepath + tvgId + this.url
|
||||
}
|
||||
|
||||
getVerticalResolution(): number {
|
||||
if (!this.quality) return 0
|
||||
|
||||
const [, verticalResolutionString] = this.quality.match(/^(\d+)/) || ['', '0']
|
||||
|
||||
return parseInt(verticalResolutionString)
|
||||
}
|
||||
|
||||
getBroadcastCountries(): Collection<sdk.Models.Country> {
|
||||
const countries = new Collection<sdk.Models.Country>()
|
||||
|
||||
const feed = this.getFeed()
|
||||
if (!feed) return countries
|
||||
|
||||
feed
|
||||
.getBroadcastArea()
|
||||
.getLocations()
|
||||
.forEach((location: sdk.Models.BroadcastAreaLocation) => {
|
||||
let country: sdk.Models.Country | undefined
|
||||
switch (location.type) {
|
||||
case 'country': {
|
||||
country = data.countriesKeyByCode.get(location.code)
|
||||
break
|
||||
}
|
||||
case 'subdivision': {
|
||||
const subdivision = data.subdivisionsKeyByCode.get(location.code)
|
||||
if (!subdivision) break
|
||||
country = data.countriesKeyByCode.get(subdivision.country)
|
||||
break
|
||||
}
|
||||
case 'city': {
|
||||
const city = data.citiesKeyByCode.get(location.code)
|
||||
if (!city) break
|
||||
country = data.countriesKeyByCode.get(city.country)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (country) countries.add(country)
|
||||
})
|
||||
|
||||
return countries.uniqBy((country: sdk.Models.Country) => country.code)
|
||||
}
|
||||
|
||||
getBroadcastSubdivisions(): Collection<sdk.Models.Subdivision> {
|
||||
const subdivisions = new Collection<sdk.Models.Subdivision>()
|
||||
|
||||
const feed = this.getFeed()
|
||||
if (!feed) return subdivisions
|
||||
|
||||
feed
|
||||
.getBroadcastArea()
|
||||
.getLocations()
|
||||
.forEach((location: sdk.Models.BroadcastAreaLocation) => {
|
||||
switch (location.type) {
|
||||
case 'subdivision': {
|
||||
const subdivision = data.subdivisionsKeyByCode.get(location.code)
|
||||
if (!subdivision) break
|
||||
subdivisions.add(subdivision)
|
||||
if (!subdivision.parent) break
|
||||
const parentSubdivision = data.subdivisionsKeyByCode.get(subdivision.parent)
|
||||
if (!parentSubdivision) break
|
||||
subdivisions.add(parentSubdivision)
|
||||
break
|
||||
}
|
||||
case 'city': {
|
||||
const city = data.citiesKeyByCode.get(location.code)
|
||||
if (!city || !city.subdivision) break
|
||||
const subdivision = data.subdivisionsKeyByCode.get(city.subdivision)
|
||||
if (!subdivision) break
|
||||
subdivisions.add(subdivision)
|
||||
if (!subdivision.parent) break
|
||||
const parentSubdivision = data.subdivisionsKeyByCode.get(subdivision.parent)
|
||||
if (!parentSubdivision) break
|
||||
subdivisions.add(parentSubdivision)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return subdivisions.uniqBy((subdivision: sdk.Models.Subdivision) => subdivision.code)
|
||||
}
|
||||
|
||||
getBroadcastCities(): Collection<sdk.Models.City> {
|
||||
const cities = new Collection<sdk.Models.City>()
|
||||
|
||||
const feed = this.getFeed()
|
||||
if (!feed) return cities
|
||||
|
||||
feed
|
||||
.getBroadcastArea()
|
||||
.getLocations()
|
||||
.forEach((location: sdk.Models.BroadcastAreaLocation) => {
|
||||
if (location.type !== 'city') return
|
||||
|
||||
const city = data.citiesKeyByCode.get(location.code)
|
||||
|
||||
if (city) cities.add(city)
|
||||
})
|
||||
|
||||
return cities.uniqBy((city: sdk.Models.City) => city.code)
|
||||
}
|
||||
|
||||
getBroadcastRegions(): Collection<sdk.Models.Region> {
|
||||
const regions = new Collection<sdk.Models.Region>()
|
||||
|
||||
const feed = this.getFeed()
|
||||
if (!feed) return regions
|
||||
|
||||
feed
|
||||
.getBroadcastArea()
|
||||
.getLocations()
|
||||
.forEach((location: sdk.Models.BroadcastAreaLocation) => {
|
||||
switch (location.type) {
|
||||
case 'region': {
|
||||
const region = data.regionsKeyByCode.get(location.code)
|
||||
if (!region) break
|
||||
regions.add(region)
|
||||
|
||||
const relatedRegions = data.regions.filter((_region: sdk.Models.Region) =>
|
||||
new Collection<string>(_region.countries)
|
||||
.intersects(new Collection<string>(region.countries))
|
||||
.isNotEmpty()
|
||||
)
|
||||
regions.concat(relatedRegions)
|
||||
break
|
||||
}
|
||||
case 'country': {
|
||||
const country = data.countriesKeyByCode.get(location.code)
|
||||
if (!country) break
|
||||
const countryRegions = data.regions.filter((_region: sdk.Models.Region) =>
|
||||
new Collection<string>(_region.countries).includes(
|
||||
(code: string) => code === country.code
|
||||
)
|
||||
)
|
||||
regions.concat(countryRegions)
|
||||
break
|
||||
}
|
||||
case 'subdivision': {
|
||||
const subdivision = data.subdivisionsKeyByCode.get(location.code)
|
||||
if (!subdivision) break
|
||||
const subdivisionRegions = data.regions.filter((_region: sdk.Models.Region) =>
|
||||
new Collection<string>(_region.countries).includes(
|
||||
(code: string) => code === subdivision.country
|
||||
)
|
||||
)
|
||||
regions.concat(subdivisionRegions)
|
||||
break
|
||||
}
|
||||
case 'city': {
|
||||
const city = data.citiesKeyByCode.get(location.code)
|
||||
if (!city) break
|
||||
const cityRegions = data.regions.filter((_region: sdk.Models.Region) =>
|
||||
new Collection<string>(_region.countries).includes(
|
||||
(code: string) => code === city.country
|
||||
)
|
||||
)
|
||||
regions.concat(cityRegions)
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return regions.uniqBy((region: sdk.Models.Region) => region.code)
|
||||
}
|
||||
|
||||
isInternational(): boolean {
|
||||
const feed = this.getFeed()
|
||||
if (!feed) return false
|
||||
|
||||
const broadcastAreaCodes = feed.getBroadcastArea().codes
|
||||
if (broadcastAreaCodes.join(';').includes('r/')) return true
|
||||
if (broadcastAreaCodes.filter(code => code.includes('c/')).length > 1) return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
hasCategory(category: sdk.Models.Category): boolean {
|
||||
const channel = this.getChannel()
|
||||
|
||||
if (!channel) return false
|
||||
|
||||
const found = channel.categories.find((id: string) => id === category.id)
|
||||
|
||||
return !!found
|
||||
}
|
||||
|
||||
hasLanguage(language: sdk.Models.Language): boolean {
|
||||
const found = this.getLanguages().find(
|
||||
(_language: sdk.Models.Language) => _language.code === language.code
|
||||
)
|
||||
|
||||
return !!found
|
||||
}
|
||||
|
||||
setDirectives(directives: string[]): this {
|
||||
this.directives = new Collection(directives).filter((directive: string) =>
|
||||
/^(#KODIPROP|#EXTVLCOPT)/.test(directive)
|
||||
)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
updateTvgId(): this {
|
||||
if (!this.channel) return this
|
||||
if (this.feed) {
|
||||
this.tvgId = `${this.channel}@${this.feed}`
|
||||
} else {
|
||||
this.tvgId = this.channel
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
updateFilepath(): this {
|
||||
const channel = this.getChannel()
|
||||
if (!channel) return this
|
||||
|
||||
this.filepath = `${channel.country.toLowerCase()}.m3u`
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
updateTitle(): this {
|
||||
const channel = this.getChannel()
|
||||
|
||||
if (!channel) return this
|
||||
|
||||
const feed = this.getFeed()
|
||||
|
||||
this.title = channel.name
|
||||
if (feed && !feed.is_main) {
|
||||
this.title += ` ${feed.name}`
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
normalizeURL() {
|
||||
this.url = normalizeURL(this.url)
|
||||
}
|
||||
|
||||
getLogos(): Collection<sdk.Models.Logo> {
|
||||
const logos = super.getLogos()
|
||||
|
||||
if (logos.isEmpty()) return new Collection()
|
||||
|
||||
function format(logo: sdk.Models.Logo): number {
|
||||
const levelByFormat = { SVG: 0, PNG: 3, APNG: 1, WebP: 1, AVIF: 1, JPEG: 2, GIF: 1 }
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: sdk.Models.Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return logos.sortBy([format, size], ['desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getFilepath(): string {
|
||||
return this.filepath || ''
|
||||
}
|
||||
|
||||
getFilename(): string {
|
||||
return path.basename(this.getFilepath())
|
||||
}
|
||||
|
||||
getLine(): number {
|
||||
return this.line || -1
|
||||
}
|
||||
|
||||
getTvgId(): string {
|
||||
if (this.tvgId) return this.tvgId
|
||||
|
||||
return this.getId()
|
||||
}
|
||||
|
||||
getTvgLogo(): string {
|
||||
const logo = this.getLogos().first()
|
||||
|
||||
return logo ? logo.url : ''
|
||||
}
|
||||
|
||||
getFullTitle(): string {
|
||||
let title = `${this.title}`
|
||||
|
||||
if (this.quality) {
|
||||
title += ` (${this.quality})`
|
||||
}
|
||||
|
||||
if (this.label) {
|
||||
title += ` [${this.label}]`
|
||||
}
|
||||
|
||||
return title
|
||||
}
|
||||
|
||||
toString(options: { public?: boolean } = {}) {
|
||||
options = { ...{ public: false }, ...options }
|
||||
|
||||
let output = `#EXTINF:-1 tvg-id="${this.getTvgId()}"`
|
||||
|
||||
if (options.public) {
|
||||
output += ` tvg-logo="${this.getTvgLogo()}" group-title="${this.groupTitle}"`
|
||||
}
|
||||
|
||||
if (this.referrer) {
|
||||
output += ` http-referrer="${this.referrer}"`
|
||||
}
|
||||
|
||||
if (this.user_agent) {
|
||||
output += ` http-user-agent="${this.user_agent}"`
|
||||
}
|
||||
|
||||
output += `,${this.getFullTitle()}`
|
||||
|
||||
this.directives.forEach((prop: string) => {
|
||||
output += `\r\n${prop}`
|
||||
})
|
||||
|
||||
output += `\r\n${this.url}`
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
toObject(): sdk.Types.StreamData {
|
||||
let feedId = this.feed
|
||||
if (!feedId) {
|
||||
const feed = this.getFeed()
|
||||
if (feed) feedId = feed.id
|
||||
}
|
||||
|
||||
return {
|
||||
channel: this.channel,
|
||||
feed: feedId,
|
||||
title: this.title,
|
||||
url: this.url,
|
||||
quality: this.quality,
|
||||
user_agent: this.user_agent,
|
||||
referrer: this.referrer
|
||||
}
|
||||
}
|
||||
|
||||
clone(): Stream {
|
||||
return Object.assign(Object.create(Object.getPrototypeOf(this)), this)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,83 +0,0 @@
import { SubdivisionData, SubdivisionSerializedData } from '../types/subdivision'
import { Dictionary, Collection } from '@freearhey/core'
import { Country, Region } from '.'

export class Subdivision {
  code: string
  name: string
  countryCode: string
  country?: Country
  parentCode?: string
  parent?: Subdivision
  regions?: Collection
  cities?: Collection

  constructor(data?: SubdivisionData) {
    if (!data) return

    this.code = data.code
    this.name = data.name
    this.countryCode = data.country
    this.parentCode = data.parent || undefined
  }

  withCountry(countriesKeyByCode: Dictionary): this {
    this.country = countriesKeyByCode.get(this.countryCode)

    return this
  }

  withRegions(regions: Collection): this {
    this.regions = regions.filter((region: Region) =>
      region.countryCodes.includes(this.countryCode)
    )

    return this
  }

  withCities(citiesGroupedBySubdivisionCode: Dictionary): this {
    this.cities = new Collection(citiesGroupedBySubdivisionCode.get(this.code))

    return this
  }

  withParent(subdivisionsKeyByCode: Dictionary): this {
    if (!this.parentCode) return this

    this.parent = subdivisionsKeyByCode.get(this.parentCode)

    return this
  }

  getRegions(): Collection {
    if (!this.regions) return new Collection()

    return this.regions
  }

  getCities(): Collection {
    if (!this.cities) return new Collection()

    return this.cities
  }

  serialize(): SubdivisionSerializedData {
    return {
      code: this.code,
      name: this.name,
      countryCode: this.countryCode,
      country: this.country ? this.country.serialize() : undefined,
      parentCode: this.parentCode || null
    }
  }

  deserialize(data: SubdivisionSerializedData): this {
    this.code = data.code
    this.name = data.name
    this.countryCode = data.countryCode
    this.country = data.country ? new Country().deserialize(data.country) : undefined
    this.parentCode = data.parentCode || undefined

    return this
  }
}

@@ -1,30 +0,0 @@
import { Collection, Dictionary } from '@freearhey/core'

type TimezoneData = {
  id: string
  utc_offset: string
  countries: string[]
}

export class Timezone {
  id: string
  utcOffset: string
  countryCodes: Collection
  countries?: Collection

  constructor(data: TimezoneData) {
    this.id = data.id
    this.utcOffset = data.utc_offset
    this.countryCodes = new Collection(data.countries)
  }

  withCountries(countriesKeyByCode: Dictionary): this {
    this.countries = this.countryCodes.map((code: string) => countriesKeyByCode.get(code))

    return this
  }

  getCountries(): Collection {
    return this.countries || new Collection()
  }
}

@@ -1,56 +1,63 @@
import { Storage, Collection, File, Dictionary } from '@freearhey/core'
import { HTMLTable, LogParser, LogItem } from '../core'
import { LOGS_DIR, README_DIR } from '../constants'
import { Category } from '../models'
import { Table } from './table'

type CategoriesTableProps = {
  categoriesKeyById: Dictionary
}

export class CategoriesTable implements Table {
  categoriesKeyById: Dictionary

  constructor({ categoriesKeyById }: CategoriesTableProps) {
    this.categoriesKeyById = categoriesKeyById
  }

  async make() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')

    let items = new Collection()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.type === 'category')
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const categoryId = file.name()
        const category: Category = this.categoriesKeyById.get(categoryId)

        items.add([
          category ? category.name : 'ZZ',
          category ? category.name : 'Undefined',
          logItem.count,
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    items = items
      .orderBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const table = new HTMLTable(items.all(), [
      { name: 'Category' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_categories.md', table.toString())
  }
}
import { HTMLTable, HTMLTableItem, LogParser, LogItem, HTMLTableColumn } from '../core'
import { Storage, File } from '@freearhey/storage-js'
import { LOGS_DIR, README_DIR } from '../constants'
import { Collection } from '@freearhey/core'
import * as sdk from '@iptv-org/sdk'
import { Table } from './table'
import { data } from '../api'

export class CategoriesTable implements Table {
  async create() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')

    let items = new Collection<HTMLTableItem>()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.type === 'category')
      .forEach((logItem: LogItem) => {
        if (logItem.filepath.includes('undefined')) {
          items.add([
            'ZZ',
            'Undefined',
            logItem.count.toString(),
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])

          return
        }

        const file = new File(logItem.filepath)
        const categoryId = file.name()
        const category: sdk.Models.Category | undefined = data.categoriesKeyById.get(categoryId)

        if (!category) return

        items.add([
          category.name,
          category.name,
          logItem.count.toString(),
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    items = items
      .sortBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const columns = new Collection<HTMLTableColumn>([
      { name: 'Category' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', nowrap: true }
    ])

    const table = new HTMLTable(items, columns)

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_categories.md', table.toString())
  }
}

@@ -1,189 +1,176 @@
import { Storage, Collection, Dictionary } from '@freearhey/core'
import { City, Country, Subdivision } from '../models'
import { LOGS_DIR, README_DIR } from '../constants'
import { LogParser, LogItem } from '../core'
import { Table } from './table'

type CountriesTableProps = {
  countriesKeyByCode: Dictionary
  subdivisionsKeyByCode: Dictionary
  countries: Collection
  subdivisions: Collection
  cities: Collection
}

export class CountriesTable implements Table {
  countriesKeyByCode: Dictionary
  subdivisionsKeyByCode: Dictionary
  countries: Collection
  subdivisions: Collection
  cities: Collection

  constructor({
    countriesKeyByCode,
    subdivisionsKeyByCode,
    countries,
    subdivisions,
    cities
  }: CountriesTableProps) {
    this.countriesKeyByCode = countriesKeyByCode
    this.subdivisionsKeyByCode = subdivisionsKeyByCode
    this.countries = countries
    this.subdivisions = subdivisions
    this.cities = cities
  }

  async make() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')
    const parsed = parser.parse(generatorsLog)
    const logCountries = parsed.filter((logItem: LogItem) => logItem.type === 'country')
    const logSubdivisions = parsed.filter((logItem: LogItem) => logItem.type === 'subdivision')
    const logCities = parsed.filter((logItem: LogItem) => logItem.type === 'city')

    let items = new Collection()
    this.countries.forEach((country: Country) => {
      const countriesLogItem = logCountries.find(
        (logItem: LogItem) => logItem.filepath === `countries/${country.code.toLowerCase()}.m3u`
      )

      const countryItem = {
        index: country.name,
        count: 0,
        link: `https://iptv-org.github.io/iptv/countries/${country.code.toLowerCase()}.m3u`,
        name: `${country.flag} ${country.name}`,
        children: new Collection()
      }

      if (countriesLogItem) {
        countryItem.count = countriesLogItem.count
      }

      const countrySubdivisions = this.subdivisions.filter(
        (subdivision: Subdivision) => subdivision.countryCode === country.code
      )
      const countryCities = this.cities.filter((city: City) => city.countryCode === country.code)
      if (countrySubdivisions.notEmpty()) {
        this.subdivisions.forEach((subdivision: Subdivision) => {
          if (subdivision.countryCode !== country.code) return
          const subdivisionCities = countryCities.filter(
            (city: City) =>
              (city.subdivisionCode && city.subdivisionCode === subdivision.code) ||
              city.countryCode === subdivision.countryCode
          )
          const subdivisionsLogItem = logSubdivisions.find(
            (logItem: LogItem) =>
              logItem.filepath === `subdivisions/${subdivision.code.toLowerCase()}.m3u`
          )

          const subdivisionItem = {
            index: subdivision.name,
            name: subdivision.name,
            count: 0,
            link: `https://iptv-org.github.io/iptv/subdivisions/${subdivision.code.toLowerCase()}.m3u`,
            children: new Collection()
          }

          if (subdivisionsLogItem) {
            subdivisionItem.count = subdivisionsLogItem.count
          }

          subdivisionCities.forEach((city: City) => {
            if (city.countryCode !== country.code || city.subdivisionCode !== subdivision.code)
              return
            const citiesLogItem = logCities.find(
              (logItem: LogItem) => logItem.filepath === `cities/${city.code.toLowerCase()}.m3u`
            )

            if (!citiesLogItem) return

            subdivisionItem.children.add({
              index: city.name,
              name: city.name,
              count: citiesLogItem.count,
              link: `https://iptv-org.github.io/iptv/${citiesLogItem.filepath}`
            })
          })

          if (subdivisionItem.count > 0 || subdivisionItem.children.notEmpty()) {
            countryItem.children.add(subdivisionItem)
          }
        })
      } else if (countryCities.notEmpty()) {
        countryCities.forEach((city: City) => {
          const citiesLogItem = logCities.find(
            (logItem: LogItem) => logItem.filepath === `cities/${city.code.toLowerCase()}.m3u`
          )

          if (!citiesLogItem) return

          countryItem.children.add({
            index: city.name,
            name: city.name,
            count: citiesLogItem.count,
            link: `https://iptv-org.github.io/iptv/${citiesLogItem.filepath}`,
            children: new Collection()
          })
        })
      }

      if (countryItem.count > 0 || countryItem.children.notEmpty()) {
        items.add(countryItem)
      }
    })

    const internationalLogItem = logCountries.find(
      (logItem: LogItem) => logItem.filepath === 'countries/int.m3u'
    )

    if (internationalLogItem) {
      items.push({
        index: 'ZZ',
        name: '🌐 International',
        count: internationalLogItem.count,
        link: `https://iptv-org.github.io/iptv/${internationalLogItem.filepath}`,
        children: new Collection()
      })
    }

    const undefinedLogItem = logCountries.find(
      (logItem: LogItem) => logItem.filepath === 'countries/undefined.m3u'
    )

    if (undefinedLogItem) {
      items.push({
        index: 'ZZZ',
        name: 'Undefined',
        count: undefinedLogItem.count,
        link: `https://iptv-org.github.io/iptv/${undefinedLogItem.filepath}`,
        children: new Collection()
      })
    }

    items = items.orderBy(item => item.index)

    const output = items
      .map(item => {
        let row = `- ${item.name} <code>${item.link}</code>`

        item.children
          .orderBy(item => item.index)
          .forEach(item => {
            row += `\r\n - ${item.name} <code>${item.link}</code>`

            item.children
              .orderBy(item => item.index)
              .forEach(item => {
                row += `\r\n - ${item.name} <code>${item.link}</code>`
              })
          })

        return row
      })
      .join('\r\n')

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_countries.md', output)
  }
}
import { LOGS_DIR, README_DIR } from '../constants'
import { Storage } from '@freearhey/storage-js'
import { Collection } from '@freearhey/core'
import { LogParser, LogItem } from '../core'
import * as sdk from '@iptv-org/sdk'
import { Table } from './table'
import { data } from '../api'

type ListItem = {
  index: string
  count: number
  link: string
  name: string
  children: Collection<ListItem>
}

export class CountriesTable implements Table {
  async create() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')
    const parsed = parser.parse(generatorsLog)
    const logCountries = parsed.filter((logItem: LogItem) => logItem.type === 'country')
    const logSubdivisions = parsed.filter((logItem: LogItem) => logItem.type === 'subdivision')
    const logCities = parsed.filter((logItem: LogItem) => logItem.type === 'city')

    let items = new Collection()
    data.countries.forEach((country: sdk.Models.Country) => {
      const countryCode = country.code
      const countriesLogItem = logCountries.find(
        (logItem: LogItem) => logItem.filepath === `countries/${countryCode.toLowerCase()}.m3u`
      )

      const countryItem: ListItem = {
        index: country.name,
        count: 0,
        link: `https://iptv-org.github.io/iptv/countries/${countryCode.toLowerCase()}.m3u`,
        name: `${country.flag} ${country.name}`,
        children: new Collection()
      }

      if (countriesLogItem) {
        countryItem.count = countriesLogItem.count
      }

      const countrySubdivisions = data.subdivisions.filter(
        (subdivision: sdk.Models.Subdivision) => subdivision.country === countryCode
      )
      const countryCities = data.cities.filter(
        (city: sdk.Models.City) => city.country === countryCode
      )
      if (countrySubdivisions.isNotEmpty()) {
        data.subdivisions.forEach((subdivision: sdk.Models.Subdivision) => {
          if (subdivision.country !== countryCode) return

          const subdivisionCode = subdivision.code
          const subdivisionCities = countryCities.filter(
            (city: sdk.Models.City) =>
              (city.subdivision && city.subdivision === subdivisionCode) ||
              city.country === subdivision.country
          )
          const subdivisionsLogItem = logSubdivisions.find(
            (logItem: LogItem) =>
              logItem.filepath === `subdivisions/${subdivisionCode.toLowerCase()}.m3u`
          )

          const subdivisionItem: ListItem = {
            index: subdivision.name,
            name: subdivision.name,
            count: 0,
            link: `https://iptv-org.github.io/iptv/subdivisions/${subdivisionCode.toLowerCase()}.m3u`,
            children: new Collection<ListItem>()
          }

          if (subdivisionsLogItem) {
            subdivisionItem.count = subdivisionsLogItem.count
          }

          subdivisionCities.forEach((city: sdk.Models.City) => {
            if (city.country !== countryCode || city.subdivision !== subdivisionCode) return
            const citiesLogItem = logCities.find(
              (logItem: LogItem) => logItem.filepath === `cities/${city.code.toLowerCase()}.m3u`
            )

            if (!citiesLogItem) return

            subdivisionItem.children.add({
              index: city.name,
              name: city.name,
              count: citiesLogItem.count,
              link: `https://iptv-org.github.io/iptv/${citiesLogItem.filepath}`,
              children: new Collection<ListItem>()
            })
          })

          if (subdivisionItem.count > 0 || subdivisionItem.children.isNotEmpty()) {
            countryItem.children.add(subdivisionItem)
          }
        })
      } else if (countryCities.isNotEmpty()) {
        countryCities.forEach((city: sdk.Models.City) => {
          const citiesLogItem = logCities.find(
            (logItem: LogItem) => logItem.filepath === `cities/${city.code.toLowerCase()}.m3u`
          )

          if (!citiesLogItem) return

          countryItem.children.add({
            index: city.name,
            name: city.name,
            count: citiesLogItem.count,
            link: `https://iptv-org.github.io/iptv/${citiesLogItem.filepath}`,
            children: new Collection()
          })
        })
      }

      if (countryItem.count > 0 || countryItem.children.isNotEmpty()) {
        items.add(countryItem)
      }
    })

    const internationalLogItem = logCountries.find(
      (logItem: LogItem) => logItem.filepath === 'countries/int.m3u'
    )

    if (internationalLogItem) {
      items.add({
        index: 'ZZ',
        name: '🌐 International',
        count: internationalLogItem.count,
        link: `https://iptv-org.github.io/iptv/${internationalLogItem.filepath}`,
        children: new Collection()
      })
    }

    const undefinedLogItem = logCountries.find(
      (logItem: LogItem) => logItem.filepath === 'countries/undefined.m3u'
    )

    if (undefinedLogItem) {
      items.add({
        index: 'ZZZ',
        name: 'Undefined',
        count: undefinedLogItem.count,
        link: `https://iptv-org.github.io/iptv/${undefinedLogItem.filepath}`,
        children: new Collection()
      })
    }

    items = items.sortBy(item => item.index)

    const output = items
      .map((item: ListItem) => {
        let row = `- ${item.name} <code>${item.link}</code>`

        item.children
          .sortBy((item: ListItem) => item.index)
          .forEach((item: ListItem) => {
            row += `\r\n - ${item.name} <code>${item.link}</code>`

            item.children
              .sortBy((item: ListItem) => item.index)
              .forEach((item: ListItem) => {
                row += `\r\n - ${item.name} <code>${item.link}</code>`
              })
          })

        return row
      })
      .join('\r\n')

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_countries.md', output)
  }
}

@@ -1,4 +1,4 @@
export * from './categoriesTable'
export * from './countriesTable'
export * from './languagesTable'
export * from './regionsTable'
export * from './categoriesTable'
export * from './countriesTable'
export * from './languagesTable'
export * from './regionsTable'

@@ -1,56 +1,63 @@
import { Storage, Collection, File, Dictionary } from '@freearhey/core'
import { HTMLTable, LogParser, LogItem } from '../core'
import { LOGS_DIR, README_DIR } from '../constants'
import { Language } from '../models'
import { Table } from './table'

type LanguagesTableProps = {
  languagesKeyByCode: Dictionary
}

export class LanguagesTable implements Table {
  languagesKeyByCode: Dictionary

  constructor({ languagesKeyByCode }: LanguagesTableProps) {
    this.languagesKeyByCode = languagesKeyByCode
  }

  async make() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')

    let data = new Collection()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.type === 'language')
      .forEach((logItem: LogItem) => {
        const file = new File(logItem.filepath)
        const languageCode = file.name()
        const language: Language = this.languagesKeyByCode.get(languageCode)

        data.add([
          language ? language.name : 'ZZ',
          language ? language.name : 'Undefined',
          logItem.count,
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    data = data
      .orderBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const table = new HTMLTable(data.all(), [
      { name: 'Language', align: 'left' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', align: 'left', nowrap: true }
    ])

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_languages.md', table.toString())
  }
}
import { HTMLTable, LogParser, LogItem, HTMLTableColumn, HTMLTableItem } from '../core'
import { Storage, File } from '@freearhey/storage-js'
import { LOGS_DIR, README_DIR } from '../constants'
import { Collection } from '@freearhey/core'
import * as sdk from '@iptv-org/sdk'
import { Table } from './table'
import { data } from '../api'

export class LanguagesTable implements Table {
  async create() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')

    let items = new Collection<HTMLTableItem>()
    parser
      .parse(generatorsLog)
      .filter((logItem: LogItem) => logItem.type === 'language')
      .forEach((logItem: LogItem) => {
        if (logItem.filepath.includes('undefined')) {
          items.add([
            'ZZ',
            'Undefined',
            logItem.count.toString(),
            `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
          ])

          return
        }

        const file = new File(logItem.filepath)
        const languageCode = file.name()
        const language: sdk.Models.Language | undefined = data.languagesKeyByCode.get(languageCode)

        if (!language) return

        items.add([
          language.name,
          language.name,
          logItem.count.toString(),
          `<code>https://iptv-org.github.io/iptv/${logItem.filepath}</code>`
        ])
      })

    items = items
      .sortBy(item => item[0])
      .map(item => {
        item.shift()
        return item
      })

    const columns = new Collection<HTMLTableColumn>([
      { name: 'Language', align: 'left' },
      { name: 'Channels', align: 'right' },
      { name: 'Playlist', align: 'left', nowrap: true }
    ])

    const table = new HTMLTable(items, columns)

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_languages.md', table.toString())
  }
}

@@ -1,52 +1,49 @@
import { Storage, Collection } from '@freearhey/core'
import { LogParser, LogItem } from '../core'
import { LOGS_DIR, README_DIR } from '../constants'
import { Region } from '../models'
import { Table } from './table'

type RegionsTableProps = {
  regions: Collection
}

export class RegionsTable implements Table {
  regions: Collection

  constructor({ regions }: RegionsTableProps) {
    this.regions = regions
  }

  async make() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')
    const parsed = parser.parse(generatorsLog)
    const logRegions = parsed.filter((logItem: LogItem) => logItem.type === 'region')

    let items = new Collection()
    this.regions.forEach((region: Region) => {
      const logItem = logRegions.find(
        (logItem: LogItem) => logItem.filepath === `regions/${region.code.toLowerCase()}.m3u`
      )

      if (!logItem) return

      items.add({
        index: region.name,
        name: region.name,
        count: logItem.count,
        link: `https://iptv-org.github.io/iptv/${logItem.filepath}`
      })
    })

    items = items.orderBy(item => item.index)

    const output = items
      .map(item => {
        return `- ${item.name} <code>${item.link}</code>`
      })
      .join('\r\n')

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_regions.md', output)
  }
}
import { LOGS_DIR, README_DIR } from '../constants'
import { Storage } from '@freearhey/storage-js'
import { LogParser, LogItem } from '../core'
import { Collection } from '@freearhey/core'
import * as sdk from '@iptv-org/sdk'
import { Table } from './table'
import { data } from '../api'

type ListItem = {
  name: string
  count: number
  link: string
}

export class RegionsTable implements Table {
  async create() {
    const parser = new LogParser()
    const logsStorage = new Storage(LOGS_DIR)
    const generatorsLog = await logsStorage.load('generators.log')
    const parsed = parser.parse(generatorsLog)
    const logRegions = parsed.filter((logItem: LogItem) => logItem.type === 'region')

    let items = new Collection<ListItem>()
    data.regions.forEach((region: sdk.Models.Region) => {
      const logItem = logRegions.find(
        (logItem: LogItem) => logItem.filepath === `regions/${region.code.toLowerCase()}.m3u`
      )

      if (!logItem) return

      items.add({
        name: region.name,
        count: logItem.count,
        link: `https://iptv-org.github.io/iptv/${logItem.filepath}`
      })
    })

    items = items.sortBy(item => item.name)

    const output = items
      .map(item => {
        return `- ${item.name} <code>${item.link}</code>`
      })
      .join('\r\n')

    const readmeStorage = new Storage(README_DIR)
    await readmeStorage.save('_regions.md', output)
  }
}

@@ -1,3 +1,3 @@
export interface Table {
  make(): void
}
export interface Table {
  create(): void
}

5
scripts/types/blocklistRecord.d.ts
vendored
@@ -1,5 +0,0 @@
export type BlocklistRecordData = {
  channel: string
  reason: string
  ref: string
}

9
scripts/types/category.d.ts
vendored
@@ -1,9 +0,0 @@
export type CategorySerializedData = {
  id: string
  name: string
}

export type CategoryData = {
  id: string
  name: string
}

50
scripts/types/channel.d.ts
vendored
@@ -1,50 +0,0 @@
import { Collection } from '@freearhey/core'
import type { CountrySerializedData } from './country'
import type { SubdivisionSerializedData } from './subdivision'
import type { CategorySerializedData } from './category'

export type ChannelSerializedData = {
  id: string
  name: string
  altNames: string[]
  network?: string
  owners: string[]
  countryCode: string
  country?: CountrySerializedData
  subdivisionCode?: string
  subdivision?: SubdivisionSerializedData
  cityName?: string
  categoryIds: string[]
  categories?: CategorySerializedData[]
  isNSFW: boolean
  launched?: string
  closed?: string
  replacedBy?: string
  website?: string
}

export type ChannelData = {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: Collection
  country: string
  subdivision: string
  city: string
  categories: Collection
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
}

export type ChannelSearchableData = {
  id: string
  name: string
  altNames: string[]
  guideNames: string[]
  streamTitles: string[]
  feedFullNames: string[]
}

20
scripts/types/city.d.ts
vendored
@@ -1,20 +0,0 @@
import { CountrySerializedData } from './country'
import { SubdivisionSerializedData } from './subdivision'

export type CitySerializedData = {
  code: string
  name: string
  countryCode: string
  country?: CountrySerializedData
  subdivisionCode: string | null
  subdivision?: SubdivisionSerializedData
  wikidataId: string
}

export type CityData = {
  code: string
  name: string
  country: string
  subdivision: string | null
  wikidata_id: string
}

20
scripts/types/country.d.ts
vendored
@@ -1,20 +0,0 @@
import type { LanguageSerializedData } from './language'
import type { SubdivisionSerializedData } from './subdivision'
import type { RegionSerializedData } from './region'

export type CountrySerializedData = {
  code: string
  name: string
  flag: string
  languageCode: string
  language: LanguageSerializedData | null
  subdivisions: SubdivisionSerializedData[]
  regions: RegionSerializedData[]
}

export type CountryData = {
  code: string
  name: string
  lang: string
  flag: string
}

21
scripts/types/dataLoader.d.ts
vendored
@@ -1,21 +0,0 @@
import { Storage } from '@freearhey/core'

export type DataLoaderProps = {
  storage: Storage
}

export type DataLoaderData = {
  countries: object | object[]
  regions: object | object[]
  subdivisions: object | object[]
  languages: object | object[]
  categories: object | object[]
  blocklist: object | object[]
  channels: object | object[]
  feeds: object | object[]
  logos: object | object[]
  timezones: object | object[]
  guides: object | object[]
  streams: object | object[]
  cities: object | object[]
}

31
scripts/types/dataProcessor.d.ts
vendored
@@ -1,31 +0,0 @@
import { Collection, Dictionary } from '@freearhey/core'

export type DataProcessorData = {
  blocklistRecordsGroupedByChannelId: Dictionary
  subdivisionsGroupedByCountryCode: Dictionary
  feedsGroupedByChannelId: Dictionary
  guidesGroupedByStreamId: Dictionary
  logosGroupedByStreamId: Dictionary
  subdivisionsKeyByCode: Dictionary
  countriesKeyByCode: Dictionary
  languagesKeyByCode: Dictionary
  streamsGroupedById: Dictionary
  categoriesKeyById: Dictionary
  timezonesKeyById: Dictionary
  regionsKeyByCode: Dictionary
  blocklistRecords: Collection
  channelsKeyById: Dictionary
  citiesKeyByCode: Dictionary
  subdivisions: Collection
  categories: Collection
  countries: Collection
  languages: Collection
  timezones: Collection
  channels: Collection
  regions: Collection
  streams: Collection
  cities: Collection
  guides: Collection
  feeds: Collection
  logos: Collection
}

10
scripts/types/feed.d.ts
vendored
@@ -1,10 +0,0 @@
export type FeedData = {
  channel: string
  id: string
  name: string
  is_main: boolean
  broadcast_area: string[]
  languages: string[]
  timezones: string[]
  video_format: string
}

17
scripts/types/guide.d.ts
vendored
@@ -1,17 +0,0 @@
export type GuideSerializedData = {
  channelId?: string
  feedId?: string
  siteDomain: string
  siteId: string
  siteName: string
  languageCode: string
}

export type GuideData = {
  channel: string
  feed: string
  site: string
  site_id: string
  site_name: string
  lang: string
}

9
scripts/types/language.d.ts
vendored
@@ -1,9 +0,0 @@
export type LanguageSerializedData = {
  code: string
  name: string
}

export type LanguageData = {
  code: string
  name: string
}
Some files were not shown because too many files have changed in this diff