Compare commits

167 commits (SHA1):

1a2e205c1f, 5dd04cb8ab, 62d05e5e08, 1c087ec856, 010c12da67, 9bdac38561, 790ca9c90a, 58f72d2d9c,
285e6513ed, 412bc8cf2d, 45cd8b8a00, ae2227959e, b50c92f919, 93a1d9c164, 0b10e68c60, 73ac4076ac,
5968b82a0b, ce1d2a0fd9, de3f813b46, 4797b1a3b7, 3e996d284d, 420c5a0836, c6b953055a, 1cd0c112a6,
492d28ea37, 4eb7e03b67, e029f329eb, 47de9a752c, 51c9aa2887, 82499a53d4, df15302f2c, 039b51262d,
465add46d4, bce965b402, 95ce293169, 5d604c2cad, ed2d3ca277, 0478f40d02, a4be73da3b, 762192518f,
fa51df192d, 1a5cc02097, a07f54ca33, 6a8cbe92a9, 16d9376ec9, 4356f5c72a, 076dc94292, fbc527010a,
5b4a22276d, b55a563fce, 2a701a6dfe, 8931fb4758, 2124165319, 0d701129a0, ebd8625e1e, b68ca67386,
17a7019c60, 54af92251c, d9edeb747d, b69b722a37, 669c23ea09, 2b3ba8e7fa, 9a761e7d30, 9d00e052f0,
7c159e97de, ba8e4ff33c, 9b067a437c, aba39d06bf, 469d22a833, 43bd49ce5b, 79dc190ccc, 495659e9cd,
2fec2c9e4c, 9cba66634d, b2f63bf231, 9c9ef22730, 5d84ec3be2, 2150961d27, 53bca5a3d3, cd3938eb33,
eb0b88bfcf, b9bbf8bbca, 65b3d0c0de, 93b8f32f68, 28bb164e8e, 4911cc76a3, 26ac539bc4, 75ae6b16a4,
2835b1d28f, 748aad16d7, 2c2fbb8583, 20edcbf7fa, db81dc39ba, c3b0aef1ef, 50e29efdfe, 285e41bc88,
ea9d0fc449, 9cdd2eef81, 2f8833236a, 2b680eeb6d, 809f120db0, 6d9ef8bbc3, a26d6ec6bb, 2d26ced3fc,
d74cd4bf24, f040d897a7, ed2f87f57b, 9b9e31f54c, b3cfcf660e, e5bcd1f94e, 2b6fa769f7, 3ccc82f343,
f4273cafb6, 59d63f61d9, 9d9103a83b, 0b085b6d03, f77538f179, f7810f7f95, 4d28e4603f, 8b787e4ae0,
f5ba168172, 1df6dadbdd, 3dc29144a3, 951167ce17, 906e4055d8, 2f5526388a, 82341642f4, c96b1eb09d,
f5bfa67c69, 47797ffcd4, a73053e380, bc042fead7, ed6779e937, ee7ca68f87, 32693b6378, 984e5588c8,
a42a1af867, 03de680915, 8c6e142314, b12bde4f79, 1120aa3841, 652ca73126, 8706e72f6a, 319d521773,
d9474cdcc5, e49a34177a, 67d203e011, 0d38b3de16, 38116a14f3, b28f0b65f0, 13ab4a9363, 7cb7783a34,
d1a13dad38, b4e06dea99, 0f92dc0fdf, 6a58895d37, 1709a2b7df, febb3da0c1, 552a428985, 38e04bd42a,
8f0ba5ba4f, c67aedceb1, b3a7fbd9b5, 29522428de, 77bd52b2ae, d8112e7628, cb5053476d
.gitignore (vendored): 1 line changed

@@ -15,6 +15,7 @@
version.lock
logs/*
cache/*
*.mmdb

# HTTPS Cert/Key #
##################
API.md: 288 lines changed

@@ -169,6 +169,10 @@ Return the api docs formatted with markdown.
Download the PlexPy log file.


### download_plex_log
Download the Plex log file.


### edit_library
Update a library section on PlexPy.
@@ -318,6 +322,34 @@ Returns:
```


### get_geoip_lookup
Get the geolocation info for an IP address. The GeoLite2 database must be installed.

```
Required parameters:
    ip_address

Optional parameters:
    None

Returns:
    json:
        {"continent": "North America",
         "country": "United States",
         "region": "California",
         "city": "Mountain View",
         "postal_code": "94035",
         "timezone": "America/Los_Angeles",
         "latitude": 37.386,
         "longitude": -122.0838,
         "accuracy": 1000
         }
    json:
        {"error": "The address 127.0.0.1 is not in the database."
         }
```
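For readers unfamiliar with how PlexPy API commands such as `get_geoip_lookup` are invoked, here is a minimal sketch using the query-string interface; the base URL, port, and API key are placeholder assumptions, and the exact shape of the response wrapper can vary by PlexPy version.

```python
# Minimal sketch: look up geolocation info for an IP address via the PlexPy API.
# The base URL and API key below are assumptions; substitute your own values.
import requests

PLEXPY_URL = "http://localhost:8181"  # assumed default PlexPy address
API_KEY = "YOUR_API_KEY"              # assumed; copied from the PlexPy settings page

params = {
    "apikey": API_KEY,
    "cmd": "get_geoip_lookup",
    "ip_address": "8.8.8.8",
}
resp = requests.get(PLEXPY_URL + "/api/v2", params=params, timeout=10)
resp.raise_for_status()
payload = resp.json()
# The documented fields ("country", "city", "latitude", ...) may be nested
# under a response/data wrapper depending on the PlexPy version.
print(payload)
```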
### get_history
Get the PlexPy history.
@@ -543,6 +575,33 @@ Returns:
```


### get_library
Get a library's details.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        {"child_count": null,
         "count": 887,
         "do_notify": 1,
         "do_notify_created": 1,
         "keep_history": 1,
         "library_art": "/:/resources/movie-fanart.jpg",
         "library_thumb": "/:/resources/movie.png",
         "parent_count": null,
         "section_id": 1,
         "section_name": "Movies",
         "section_type": "movie"
         }
```


### get_library_media_info
Get the data on the PlexPy media info tables.
@@ -619,6 +678,66 @@ Returns:
```


### get_library_user_stats
Get a library's user statistics.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        [{"friendly_name": "Jon Snow",
          "total_plays": 170,
          "user_id": 133788,
          "user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar"
          },
         {"platform_type": "DanyKhaleesi69",
          "total_plays": 42,
          "user_id": 8008135,
          "user_thumb": "https://plex.tv/users/568gwwoib5t98a3a/avatar"
          },
         {...},
         {...}
         ]
```


### get_library_watch_time_stats
Get a library's watch time statistics.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        [{"query_days": 1,
          "total_plays": 0,
          "total_time": 0
          },
         {"query_days": 7,
          "total_plays": 3,
          "total_time": 15694
          },
         {"query_days": 30,
          "total_plays": 35,
          "total_time": 63054
          },
         {"query_days": 0,
          "total_plays": 508,
          "total_time": 1183080
          }
         ]
```
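As a sketch of how the watch-time buckets above might be consumed by a client, the snippet below fetches the statistics for one library and prints each `query_days` bucket; the URL, API key, and `section_id` values are placeholder assumptions, and `total_time` is assumed to be reported in seconds.

```python
# Sketch: print the get_library_watch_time_stats buckets for one library section.
# URL, API key, and section_id are assumptions; adjust for your own server.
import requests

params = {
    "apikey": "YOUR_API_KEY",
    "cmd": "get_library_watch_time_stats",
    "section_id": "1",
}
resp = requests.get("http://localhost:8181/api/v2", params=params, timeout=10).json()

# Depending on the PlexPy version the list may be nested under a response/data wrapper.
buckets = resp["response"]["data"] if isinstance(resp, dict) and "response" in resp else resp

for bucket in buckets:
    label = "all time" if bucket["query_days"] == 0 else "last %d days" % bucket["query_days"]
    hours = bucket["total_time"] / 3600.0  # total_time appears to be seconds
    print("%s: %d plays, %.1f hours" % (label, bucket["total_plays"], hours))
```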
### get_logs
Get the PlexPy logs.
@@ -653,7 +772,7 @@ Get the metadata for a media item.
```
Required parameters:
    rating_key (str): Rating key of the item
    media_info (bool): True or False wheter to get media info
    media_info (bool): True or False whether to get media info

Optional parameters:
    None
@@ -1311,6 +1430,35 @@ Returns:
```


### get_user
Get a user's details.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        {"allow_guest": 1,
         "deleted_user": 0,
         "do_notify": 1,
         "email": "Jon.Snow.1337@CastleBlack.com",
         "friendly_name": "Jon Snow",
         "is_allow_sync": 1,
         "is_home_user": 1,
         "is_restricted": 0,
         "keep_history": 1,
         "shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
         "user_id": 133788,
         "user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
         "username": "LordCommanderSnow"
         }
```


### get_user_ips
Get the data on PlexPy users IP table.
@@ -1415,6 +1563,66 @@ Returns:
```


### get_user_player_stats
Get a user's player statistics.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        [{"platform_type": "Chrome",
          "player_name": "Plex Web (Chrome)",
          "result_id": 1,
          "total_plays": 170
          },
         {"platform_type": "Chromecast",
          "player_name": "Chromecast",
          "result_id": 2,
          "total_plays": 42
          },
         {...},
         {...}
         ]
```


### get_user_watch_time_stats
Get a user's watch time statistics.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        [{"query_days": 1,
          "total_plays": 0,
          "total_time": 0
          },
         {"query_days": 7,
          "total_plays": 3,
          "total_time": 15694
          },
         {"query_days": 30,
          "total_plays": 35,
          "total_time": 63054
          },
         {"query_days": 0,
          "total_plays": 508,
          "total_time": 1183080
          }
         ]
```


### get_users
Get a list of all users that have access to your server.
@@ -1497,6 +1705,37 @@ Returns:
```


### get_whois_lookup
Get the connection info for an IP address.

```
Required parameters:
    ip_address

Optional parameters:
    None

Returns:
    json:
        {"host": "google-public-dns-a.google.com",
         "nets": [{"description": "Google Inc.",
                   "address": "1600 Amphitheatre Parkway",
                   "city": "Mountain View",
                   "state": "CA",
                   "postal_code": "94043",
                   "country": "United States",
                   ...
                   },
                  {...}
                  ]
    json:
        {"host": "Not available",
         "nets": [],
         "error": "IPv4 address 127.0.0.1 is already defined as Loopback via RFC 1122, Section 3.2.1.3."
         }
```


### import_database
Import a PlexWatch or Plexivity database into PlexPy.
@@ -1514,12 +1753,35 @@ Returns:
```


### install_geoip_db
Downloads and installs the GeoLite2 database
### notify
Send a notification using PlexPy.

```
Required parameters:
    agent_id(str): The id of the notification agent to use
        9   # Boxcar2
        17  # Browser
        10  # Email
        16  # Facebook
        0   # Growl
        19  # Hipchat
        12  # IFTTT
        18  # Join
        4   # NotifyMyAndroid
        3   # Plex Home Theater
        1   # Prowl
        5   # Pushalot
        6   # Pushbullet
        7   # Pushover
        15  # Scripts
        14  # Slack
        13  # Telegram
        11  # Twitter
        2   # XBMC
    subject(str): The subject of the message
    body(str): The body of the message
@@ -1531,6 +1793,26 @@ Returns:
```
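The sketch below sends a test notification through the `notify` command using one of the agent ids listed above; the base URL and API key are placeholder assumptions.

```python
# Sketch: send a notification through an existing PlexPy notification agent.
# Base URL and API key are assumptions; agent_id 7 is Pushover per the list above.
import requests

params = {
    "apikey": "YOUR_API_KEY",
    "cmd": "notify",
    "agent_id": 7,  # Pushover
    "subject": "PlexPy",
    "body": "Test notification sent through the API.",
}
resp = requests.get("http://localhost:8181/api/v2", params=params, timeout=10)
print(resp.status_code, resp.text)
```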
### pms_image_proxy
Gets an image from the PMS and saves it to the image cache directory.

```
Required parameters:
    img (str): /library/metadata/153037/thumb/1462175060
        or
    rating_key (str): 54321

Optional parameters:
    width (str): 150
    height (str): 255
    fallback (str): "poster", "cover", "art"
    refresh (bool): True or False whether to refresh the image cache

Returns:
    None
```
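Because `pms_image_proxy` caches and serves an image rather than returning JSON, a client typically either embeds the URL directly or saves the response body. The sketch below uses the same query-string interface and placeholder URL/API key as the earlier examples; it also assumes the image bytes come back in the HTTP response even though the documented return value is `None`.

```python
# Sketch: fetch a resized poster through pms_image_proxy and save it locally.
# URL, API key, and rating_key are assumptions; the image being returned in the
# HTTP body is also an assumption (the docs list the return value as None).
import requests

params = {
    "apikey": "YOUR_API_KEY",
    "cmd": "pms_image_proxy",
    "rating_key": "54321",
    "width": "150",
    "height": "255",
    "fallback": "poster",
}
resp = requests.get("http://localhost:8181/api/v2", params=params, timeout=30)
if resp.ok and resp.content:
    with open("poster.jpg", "wb") as f:
        f.write(resp.content)
```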
### refresh_libraries_list
Refresh the PlexPy libraries list.
@@ -1618,6 +1900,10 @@ Returns:
```


### uninstall_geoip_db
Uninstalls the GeoLite2 database


### update
Check for PlexPy updates on Github.
CHANGELOG.md: 107 lines changed

@@ -1,5 +1,112 @@
# Changelog

## v1.4.13 (2016-10-08)

* New: Option to set the number of days to keep PlexPy backups.
* New: Option to add a supplementary url to Pushover notifications.
* New: Option to set a timeout duration for script notifications.
* New: Added flush temporary sessions button to extra settings for emergency use.
* New: Added pms_image_proxy to the API.
* Fix: Insanely long play durations being recorded when connection to the Plex server is lost.
* Fix: Script notification output not being sent to the logger.
* Fix: New libraries not being added to homepage automatically.
* Fix: Success message shown incorrectly when sending a test notification.
* Fix: PlexPy log level filter not working.
* Fix: Admin username not shown in login logs.
* Fix: FeatHub link in readme document.
* Change: Posters disabled by default for all notification agents.
* Change: Disable manual changing of the PlexPy API key.
* Change: Force refresh the Plex.tv token when fetching a new token.
* Change: Script notifications run in a new thread with the timeout setting.
* Change: Watched percent moved to general settings.
* Change: Use human readable file sizes to the media info tables. (Thanks @logaritmisk)
* Change: Update pytz library.


## v1.4.12 (2016-09-18)

* Fix: PMS update check not working for MacOSX.
* Fix: Square covers for music stats on homepage.
* Fix: Card width on the homepage for iPhone 6/7 Plus. (Thanks @XusBadia)
* Fix: Check for running PID when starting PlexPy. (Thanks @spolyack)
* Fix: FreeBSD service script not stopping PlexPy properly.
* Fix: Some web UI cleanup.
* Change: GitHub repostitory moved.


## v1.4.11 (2016-09-02)

* Fix: PlexWatch and Plexivity import errors.
* Fix: Searching in history datatables.
* Fix: Notifications not sending for Local user.


## v1.4.10 (2016-08-15)

* Fix: Missing python ipaddress module preventing PlexPy from starting.


## v1.4.9 (2016-08-14)

* New: Option to include current activity in the history tables.
* New: ISP lookup info in the IP address modal.
* New: Option to disable web page previews for Telegram notifications.
* Fix: Send correct JSON header for Slack/Mattermost notifications.
* Fix: Twitter and Facebook test notifications incorrectly showing as "failed".
* Fix: Current activity progress bars extending past 100%.
* Fix: Typo in the setup wizard. (Thanks @wopian)
* Fix: Update PMS server version before checking for a new update.
* Change: Compare distro and build when checking for server updates.
* Change: Nicer y-axis intervals when viewing "Play Duration" graphs.


## v1.4.8 (2016-07-16)

* New: Setting to specify PlexPy backup interval.
* Fix: User Concurrent Streams Notifications by IP Address checkbox not working.
* Fix: Substitute {update_version} in fallback PMS update notification text.
* Fix: Check version for automatic IP logging setting.
* Fix: Use library refresh interval.


## v1.4.7 (2016-07-14)

* New: Use MaxMind GeoLite2 for IP address lookup.
  * Note: The GeoLite2 database must be installed from the settings page.
* New: Check for Plex updates using plex.tv downloads instead of the server API.
  * Note: Check for Plex updates has been disabled and must be re-enabled in the settings.
* New: More notification options for Plex updates.
* New: Notifications for concurrent streams by a single user.
* New: Notifications for user streaming from a new device.
* New: HipChat notification agent. (Thanks @aboron)
* Fix: Username showing as blank when friendly name is blank.
* Fix: Direct stream count wrong in the current activity header.
* Fix: Current activity reporting direct stream when reducing the stream quality switches to transcoding.
* Fix: Apostophe in an Arnold quote causing the shutdown/restart page to crash.
* Fix: Disable refreshing posters in guest mode.
* Fix: PlexWatch/Plexivity import unable to select the "grouped" database table.
* Change: Updated Facebook notification instructions.
* Change: Subject line optional for Join notifications.
* Change: Line break between subject and body text instead of a colon for Facebook, Slack, Twitter, and Telegram.
* Change: Allow Mattermost notifications using the Slack config.
* Change: Better formatting for Slack poster notifications.
* Change: Telegram only notifies once instead of twice when posters are enabled.
* Change: Host Open Sans font locally instead of querying Google Fonts.


## v1.4.6 (2016-06-11)

* New: Added User and Library statistics to the API.
* New: Ability to refresh individual poster images without clearing the entire cache. (Thanks @Hellowlol)
* New: Added {added_date}, {updated_date}, and {last_viewed_date} to metadata notification options.
* New: Log level filter for Plex logs. (Thanks @sanderploegsma)
* New: Log level filter for PlexPy logs.
* New: Button to download Plex logs directly from the web interface.
* New: Advanced setting in the config file to change the number of Plex log lines retrieved.
* Fix: FreeBSD and FreeNAS init scripts to reflect the path in the installation guide. (Thanks @nortron)
* Fix: Monitoring crashing when failed to retrieve current activity.


## v1.4.5 (2016-05-25)

* Fix: PlexPy unable to start if failed to get shared libraries for a user.
@@ -9,14 +9,14 @@ In case you read this because you are posting an issue, please take a minute and
- Turning your device off and on again.
- Analyzing your logs, you just might find the solution yourself!
- Using the **search** function to see if this issue has already been reported/solved.
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
- Checking the [Wiki](https://github.com/JonnyWong16/plexpy/wiki) for
[ [Installation] ](https://github.com/JonnyWong16/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/plexpy/general) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.

##### If nothing has worked:

1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
1. Open a new issue on the GitHub [issue tracker](http://github.com/JonnyWong16/plexpy/issues).
2. Provide a clear title to easily help identify your problem.
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
4. Make sure you provide the following information:

@@ -35,7 +35,7 @@ In case you read this because you are posting an issue, please take a minute and

## Feature Requests

Feature requests are handled on [FeatHub](http://feathub.com/drzoidberg33/plexpy).
Feature requests are handled on [FeatHub](http://feathub.com/JonnyWong16/plexpy).

1. Search the existing requests to see if your suggestion has already been submitted.
2. If a similar request exists, give it a thumbs up (+1), or add additional comments to the request.

@@ -8,7 +8,7 @@ Reporting Issues:
Please use [Gist](http://gist.github.com) or [Pastebin](http://pastebin.com/).

Feature Requests:
* Feature requests are handled on FeatHub: http://feathub.com/drzoidberg33/plexpy
* Feature requests are handled on FeatHub: http://feathub.com/JonnyWong16/plexpy
* Do not post them on the GitHub issues tracker.
-->
PlexPy.py: 17 lines changed

@@ -122,8 +122,21 @@ def main():
    # If the pidfile already exists, plexpy may still be running, so
    # exit
    if os.path.exists(plexpy.PIDFILE):
        raise SystemExit("PID file '%s' already exists. Exiting." %
                         plexpy.PIDFILE)
        try:
            with open(plexpy.PIDFILE, 'r') as fp:
                pid = int(fp.read())
            os.kill(pid, 0)
        except IOError as e:
            raise SystemExit("Unable to read PID file: %s", e)
        except OSError:
            logger.warn("PID file '%s' already exists, but PID %d is " \
                        "not running. Ignoring PID file." %
                        (plexpy.PIDFILE, pid))
        else:
            # The pidfile exists and points to a live PID. plexpy may
            # still be running, so exit.
            raise SystemExit("PID file '%s' already exists. Exiting." %
                             plexpy.PIDFILE)

    # The pidfile is only useful in daemon mode, make sure we can write the
    # file properly
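In the flattened hunk above, the unconditional `raise SystemExit` directly under the `os.path.exists` check is the removed code, and the `try`/`except` block that follows is its replacement: instead of refusing to start whenever a PID file exists, PlexPy now reads the recorded PID and probes it with `os.kill(pid, 0)`, which raises `OSError` when no such process is running. A standalone sketch of that liveness probe, with an assumed file path, is shown below.

```python
# Standalone sketch of the stale-PID-file check used above (the path is an assumption).
import os

PIDFILE = "/tmp/plexpy.pid"  # assumed location for illustration

def pid_is_running(pidfile):
    """Return True if the PID recorded in pidfile refers to a live process."""
    try:
        with open(pidfile, "r") as fp:
            pid = int(fp.read())
        os.kill(pid, 0)  # signal 0: existence check only, nothing is actually sent
    except (IOError, OSError, ValueError):
        return False
    return True

if os.path.exists(PIDFILE) and not pid_is_running(PIDFILE):
    print("Stale PID file '%s'; safe to ignore or remove it." % PIDFILE)
```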
README.md: 30 lines changed

@@ -1,12 +1,14 @@
# PlexPy

[](https://gitter.im/drzoidberg33/plexpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://gitter.im/plexpy/general?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

A python based web application for monitoring, analytics and notifications for Plex Media Server (www.plex.tv).
A python based web application for monitoring, analytics and notifications for [Plex Media Server](https://plex.tv).

This project is based on code from [Headphones](https://github.com/rembo10/headphones) and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).

* PlexPy [forum thread](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program)
* [Plex forum thread](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program)
* [Gitter chat](https://gitter.im/plexpy/general)
* [/r/Plex Discord server](https://discord.gg/011TFFWSuNFI02EKr) | [PlexPy Discord server](https://discord.gg/36ggawe)

## Features

@@ -25,10 +27,16 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
* Full sync list data on all users syncing items from your library.
* And many more!!

## Preview

* [Full preview gallery on Imgur](https://imgur.com/a/RwQPM)



## Installation and Support

* [Installation Guides](https://github.com/drzoidberg33/plexpy/wiki/Installation) shows you how to install PlexPy.
* [FAQs](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)) in the wiki can help you with common problems.
* [Installation Guides](https://github.com/JonnyWong16/plexpy/wiki/Installation) shows you how to install PlexPy.
* [FAQs](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)) in the wiki can help you with common problems.

**Support** the project by implementing new features, solving support tickets and provide bug fixes.

@@ -40,14 +48,14 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
- Turning your device off and on again.
- Analyzing your logs, you just might find the solution yourself!
- Using the **search** function to see if this issue has already been reported/solved.
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
- Checking the [Wiki](https://github.com/JonnyWong16/plexpy/wiki) for
[ [Installation] ](https://github.com/JonnyWong16/plexpy/wiki/Installation) and
[ [FAQs] ](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
- For basic questions try asking on [Gitter](https://gitter.im/plexpy/general) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.

##### If nothing has worked:

1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
1. Open a new issue on the GitHub [issue tracker](http://github.com/JonnyWong16/plexpy/issues).
2. Provide a clear title to easily help identify your problem.
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
4. Make sure you provide the following information:

@@ -66,7 +74,7 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp

## Feature Requests

Feature requests are handled on [FeatHub](http://feathub.com/drzoidberg33/plexpy).
Feature requests are handled on [FeatHub](http://feathub.com/JonnyWong16/plexpy).

1. Search the existing requests to see if your suggestion has already been submitted.
2. If a similar request exists, give it a thumbs up (+1), or add additional comments to the request.
@@ -30,7 +30,7 @@
<div class="col-xs-4">
  <select id="table_name" class="form-control" name="table_name">
    <option value="processed">processed</option>
    <option value="processed">grouped</option>
    <option value="grouped">grouped</option>
  </select>
</div>
</div>

@@ -15,7 +15,7 @@
<link href="${http_root}css/bootstrap3/bootstrap.css" rel="stylesheet">
<link href="${http_root}css/pnotify.custom.min.css" rel="stylesheet" />
<link href="${http_root}css/plexpy.css" rel="stylesheet">
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,600" rel="stylesheet" type="text/css">
<link href="${http_root}css/opensans.min.css" rel="stylesheet">
<link href="${http_root}css/font-awesome.min.css" rel="stylesheet">
${next.headIncludes()}

@@ -170,7 +170,7 @@
<form action="search" method="post" class="form" id="search_form">
  <div class="input-group">
    <span class="input-textbox">
      <input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search..."/>
      <input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search Plex library..."/>
    </span>
    <span class="input-group-btn">
      <button class="btn btn-dark btn-inactive" type="submit" id="search_button"><i class="fa fa-search"></i></button>

@@ -220,6 +222,8 @@
<li><a href="settings"><i class="fa fa-fw fa-cogs"></i> Settings</a></li>
<li role="separator" class="divider"></li>
<li><a href="logs"><i class="fa fa-fw fa-list-alt"></i> View Logs</a></li>
<li><a href="${anon_url('https://github.com/%s/plexpy/wiki/Frequently-Asked-Questions-(FAQ)' % plexpy.CONFIG.GIT_USER)}" target="_blank"><i class="fa fa-fw fa-question-circle"></i> FAQ</a></li>
<li><a href="settings?support=true"><i class="fa fa-fw fa-comment"></i> Support</a></li>
<li role="separator" class="divider"></li>
<li><a href="${anon_url('https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=DG783BMSCU3V4')}" target="_blank"><i class="fa fa-fw fa-paypal"></i> Paypal</a></li>
<li><a href="${anon_url('http://swiftpanda16.tip.me/')}" target="_blank"><i class="fa fa-fw fa-btc"></i> Bitcoin</a></li>
data/interfaces/default/configuration_table.html (new file): 172 lines

@@ -0,0 +1,172 @@
<%doc>
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE

For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/

Filename: configuration_table.html
Version: 0.1

DOCUMENTATION :: END
</%doc>

<%!
    import os
    import sys
    import plexpy
    from plexpy import common, logger
    from plexpy.helpers import anon_url
%>

<table class="config-info-table small-muted">
  <tbody>
    % if plexpy.CURRENT_VERSION:
    <tr>
      <td>Git Branch:</td>
      <td><a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/tree/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_BRANCH))}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
    </tr>
    <tr>
      <td>Git Commit Hash:</td>
      <td><a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/commit/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_BRANCH))}">${plexpy.CURRENT_VERSION}</a></td>
    </tr>
    % endif
    <tr>
      <td>Configuration File:</td>
      <td>${plexpy.CONFIG_FILE}</td>
    </tr>
    <tr>
      <td>Database File:</td>
      <td>${plexpy.DB_FILE}</td>
    </tr>
    <tr>
      <td>Log File:</td>
      <td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)}</a></td>
    </tr>
    <tr>
      <td>Backup Directory:</td>
      <td>${plexpy.CONFIG.BACKUP_DIR}</td>
    </tr>
    <tr>
      <td>Cache Directory:</td>
      <td>${plexpy.CONFIG.CACHE_DIR}</td>
    </tr>
    <tr>
      <td>GeoLite2 Database:</td>
      % if plexpy.CONFIG.GEOIP_DB:
      <td>${plexpy.CONFIG.GEOIP_DB} | <a class="no-highlight" href="#" id="reinstall_geoip_db">Reinstall / Update</a> | <a class="no-highlight" href="#" id="uninstall_geoip_db">Uninstall</a></td>
      % else:
      <td><a class="no-highlight" href="#" id="install_geoip_db">Click here to install the GeoLite2 database.</a></td>
      % endif
    </tr>
    % if plexpy.ARGS:
    <tr>
      <td>Arguments:</td>
      <td>${plexpy.ARGS}</td>
    </tr>
    % endif
    <tr>
      <td>Platform:</td>
      <td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
    </tr>
    <tr>
      <td>Python Version:</td>
      <td>${sys.version}</td>
    </tr>
    <tr>
      <td class="top-line">Resources:</td>
      <td class="top-line">
        <a id="source-link" class="no-highlight" href="${anon_url('https://github.com/%s/plexpy' % plexpy.CONFIG.GIT_USER)}" target="_blank">GitHub Source</a> |
        <a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/%s/plexpy/issues' % plexpy.CONFIG.GIT_USER)}" data-id="issue">GitHub Issues</a> |
        <a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/%s/plexpy' % plexpy.CONFIG.GIT_USER)}" data-id="feature request">FeatHub Feature Requests</a> |
        <a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/wiki' % plexpy.CONFIG.GIT_USER)}" target="_blank">PlexPy Wiki</a> |
        <a id="faq-source-link" class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/wiki/Frequently-Asked-Questions-(FAQ)' % plexpy.CONFIG.GIT_USER)}" target="_blank">PlexPy FAQ</a>
      </td>
    </tr>
    <tr>
      <td>Support:</td>
      <td>
        <a class="no-highlight support-modal-link" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">Plex Forums</a> |
        <a class="no-highlight support-modal-link" href="${anon_url('https://gitter.im/plexpy/general')}" target="_blank">PlexPy Gitter Chat</a> |
        <a id="best-support-link" class="no-highlight support-modal-link" href="${anon_url('https://discord.gg/011TFFWSuNFI02EKr')}" target="_blank">/r/Plex Discord Server</a> |
        <a class="no-highlight support-modal-link" href="${anon_url('https://discord.gg/36ggawe')}" target="_blank">PlexPy Discord Server</a>
      </td>
    </tr>
  </tbody>
</table>

<div id="guidelines-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="guidelines-modal">
  <div class="modal-dialog" role="document">
    <div class="modal-content">
      <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
        <h4 class="modal-title">Guidelines</h4>
      </div>
      <div class="modal-body">
        <div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
          <strong>Please read the <a href="#" target="_blank" id="guidelines-link">guidelines</a> in the README document <br />before submitting a new <span id="guidelines-type"></span>!</strong>
          <br /><br />
          Your post may be removed for failure to follow the guidelines.
        </div>
      </div>
      <div class="modal-footer">
        <a href="#" target="_blank" id="guidelines-continue" class="btn btn-bright">Continue</a>
      </div>
    </div>
  </div>
</div>

<div id="support-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="support-modal">
  <div class="modal-dialog" role="document">
    <div class="modal-content">
      <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
        <h4 class="modal-title">Support</h4>
      </div>
      <div class="modal-body">
        <div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
          <strong>Please read the <a href="#" target="_blank" id="faq-link">FAQ</a> before asking for help!</strong>
        </div>
      </div>
      <div class="modal-footer">
        <a href="#" target="_blank" id="support-continue" class="btn btn-bright">Continue</a>
      </div>
    </div>
  </div>
</div>

<script>
  $(document).ready(function () {
      $("#install_geoip_db, #reinstall_geoip_db").click(function () {
          var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
              'The database is used to lookup IP address geolocation info.<br />' +
              'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
              'and requires <strong>100MB</strong> of free space to install in your PlexPy directory.<br />'
          var url = 'install_geoip_db';
          confirmAjaxCall(url, msg, 'Installing GeoLite2 database.', getConfigurationTable);
      });

      $("#uninstall_geoip_db").click(function () {
          var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
              'You will not be able to lookup IP address geolocation info.';
          var url = 'uninstall_geoip_db';
          confirmAjaxCall(url, msg, 'Uninstalling GeoLite2 database.', getConfigurationTable);
      });

      $('.guidelines-modal-link').on('click', function (e) {
          e.preventDefault();
          $('#guidelines-link').attr('href', $('#source-link').attr('href'));
          $('#guidelines-type').text($(this).data('id'))
          $('#guidelines-modal').modal();
          $('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
              $('#guidelines-modal').modal('hide');
          });
      });
      $('.support-modal-link').on('click', function (e) {
          e.preventDefault();
          $('#faq-link').attr('href', $('#faq-source-link').attr('href'));
          $('#support-modal').modal();
          $('#support-continue').attr('href', $(this).attr('href')).on('click', function () {
              $('#support-modal').modal('hide');
          });
      });
  });
</script>
data/interfaces/default/css/opensans.min.css (vendored, new file): 1 line

@@ -0,0 +1 @@
@font-face{font-family:'Open Sans';font-weight:400;font-style:normal;src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot);src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot?#iefix) format('embedded-opentype'),local('Open Sans'),local('Open-Sans-regular'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff2) format('woff2'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff) format('woff'),url(../fonts/Open-Sans-regular/Open-Sans-regular.ttf) format('truetype'),url(../fonts/Open-Sans-regular/Open-Sans-regular.svg#OpenSans) format('svg')}@font-face{font-family:'Open Sans';font-weight:600;font-style:normal;src:url(../fonts/Open-Sans-600/Open-Sans-600.eot);src:url(../fonts/Open-Sans-600/Open-Sans-600.eot?#iefix) format('embedded-opentype'),local('Open Sans Semibold'),local('Open-Sans-600'),url(../fonts/Open-Sans-600/Open-Sans-600.woff2) format('woff2'),url(../fonts/Open-Sans-600/Open-Sans-600.woff) format('woff'),url(../fonts/Open-Sans-600/Open-Sans-600.ttf) format('truetype'),url(../fonts/Open-Sans-600/Open-Sans-600.svg#OpenSans) format('svg')}
@@ -598,6 +598,7 @@ a .users-poster-face:hover {
}
.dashboard-instance.hover .bar {
    height: 14px;
    max-width: 100%;
    transform-origin: top;
    transition: all .2s ease;
    border-radius: 0px 0px 3px 3px;
@@ -608,6 +609,7 @@ a .users-poster-face:hover {
}
.dashboard-instance.hover .bufferbar {
    height: 14px;
    max-width: 100%;
    transform-origin: top;
    transition: all .2s ease;
    border-radius: 0px 0px 3px 3px;
@@ -836,6 +838,7 @@ a .users-poster-face:hover {
    background-color: #444;
    position: absolute;
    height: 6px;
    max-width: 100%;
    overflow: hidden;
}
.dashboard-activity-progress .bar {
@@ -853,6 +856,7 @@ a .users-poster-face:hover {
    filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0);
    position: absolute;
    height: 6px;
    max-width: 100%;
    overflow: hidden;
}
.dashboard-activity-metadata-wrapper {
@@ -1184,6 +1188,7 @@ a:hover .dashboard-recent-media-cover {
    margin: 0 40px 0 25px;
    height: 100px;
    overflow: visible;
    position: relative;
}
.summary-poster-face {
    background-position: center;
@@ -1922,6 +1927,13 @@ a .library-user-instance-box:hover {
.home-platforms-instance-poster {
    margin-left: 0px;
    position: absolute;
    overflow: hidden;
}
.home-platforms-instance-cover {
    margin-left: 0px;
    position: absolute;
    top: 20px;
    overflow: hidden;
}
.home-platforms-instance-poster .home-platforms-poster-face {
    background-position: center;
@@ -1932,6 +1944,15 @@ a .library-user-instance-box:hover {
    -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
}
.home-platforms-instance-cover .home-platforms-cover-face {
    background-position: center;
    background-size: cover;
    height: 80px;
    width: 80px;
    -webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
}
.home-platforms-instance-poster .home-platforms-library-thumb {
    background-position: center;
    background-size: cover;
@@ -2079,6 +2100,13 @@ a .library-user-instance-box:hover {
.home-platforms-instance-list-poster {
    position: absolute;
    left: 20px;
    overflow: hidden;
}
.home-platforms-instance-list-cover {
    position: absolute;
    top: 10px;
    left: 20px;
    overflow: hidden;
}
.home-platforms-instance-list-poster .home-platforms-list-poster-face {
    background-position: center;
@@ -2089,6 +2117,15 @@ a .library-user-instance-box:hover {
    -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
}
.home-platforms-instance-list-cover .home-platforms-list-cover-face {
    background-position: center;
    background-size: cover;
    height: 40px;
    width: 40px;
    -webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
    box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
}
.home-platforms-instance-list-box {
    background-position: center;
    background-size: cover;
@@ -2147,7 +2184,9 @@ a .home-platforms-instance-oval:hover,
a .home-platforms-instance-list-box:hover,
a .home-platforms-instance-list-oval:hover,
a .home-platforms-poster-face:hover,
a .home-platforms-list-poster-face:hover
a .home-platforms-cover-face:hover,
a .home-platforms-list-poster-face:hover,
a .home-platforms-list-cover-face:hover
{
    -webkit-box-shadow: inset 0 0 0 2px #e9a049;
    -moz-box-shadow: inset 0 0 0 2px #e9a049;
@@ -2623,7 +2662,7 @@ a .home-platforms-list-poster-face:hover
@media only screen
and (min-device-width: 300px)
and (max-device-width: 400px) {
and (max-device-width: 450px) {
    .home-platforms-instance {
        width: calc(100% - 20px);
    }
@@ -2703,6 +2742,13 @@ div[id^='media_info_child'] div[id^='media_info_child'] div.dataTables_scrollHea
.dataTables_scrollBody {
    -webkit-overflow-scrolling: touch;
}
.current-activity-row {
    background-color: rgba(255,255,255,.1) !important;
}
.current-activity-row:hover {
    background-color: rgba(255,255,255,0.125) !important;
}

#search_form {
    width: 300px;
    padding: 8px 15px;
@@ -2964,4 +3010,55 @@ a.no-highlight:hover {
    background-color: #555;
    border: 0px solid #444;
    border-radius: 3px;
}
.overlay-refresh-image {
    opacity: 0;
    color: #000;
    font-size: 16px;
    float: left;
    position: absolute;
    top: 0px;
    right: 10px;
    z-index: 1;
    transition: all .1s cubic-bezier(.4,0,1,1);
    -webkit-transition: all .1s cubic-bezier(.4,0,1,1);
    -moz-transition: all .1s cubic-bezier(.4,0,1,1);
    -o-transition: all .1s cubic-bezier(.4,0,1,1);
    text-shadow: -1px -1px 0 #fff, 1px -1px 0 #fff, -1px 1px 0 #fff, 1px 1px 0 #fff;
}
.overlay-refresh-image.left {
    left: 10px;
}
.overlay-refresh-image.info-art {
    color: #999;
    top: 15px;
    right: 25px;
    opacity: 1;
    text-shadow: none;
    cursor: pointer;
}
.overlay-refresh-image.info-art:hover {
    color: #fff;
    text-shadow: none;
}
a:hover .overlay-refresh-image {
    opacity: .25;
    top: 8px;
}
a:hover .overlay-refresh-image:hover {
    opacity: .9;
}
#ip_error, #isp_error {
    color: #aaa;
    display: none;
    text-align: center;
    padding-top: 10px;
    padding-bottom: 10px;
}
#plexpy-log-levels label,
#plex-log-levels label {
    margin-bottom: 0;
}
#api_key.form-control[disabled] {
    background-color: #555;
}
@@ -106,6 +106,9 @@ DOCUMENTATION :: END
% else:
<div class="dashboard-activity-poster-face" style="background-image: url(${a['art']});"></div>
% endif
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="dashboard-activity-button-info">
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${a['session_key']}">
<i class="fa fa-info-circle"></i>

@@ -108,6 +108,9 @@ DOCUMENTATION :: END
% else:
<div class="dashboard-activity-poster-face" style="background-image: url(${data['art']});"></div>
% endif
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image left" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="dashboard-activity-button-info">
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}" data-id="${data['session_key']}">
<i class="fa fa-info-circle"></i>

@@ -132,79 +135,39 @@ DOCUMENTATION :: END
% endif
</span>
</div>
% if data['media_type'] == 'track':
% if data['audio_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
Transcoding
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
<span id="transcode-state-${data['session_key']}">
% if data['video_decision'] == 'transcode' or data['audio_decision'] == 'transcode':
Stream <strong>Transcode (Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
% endif
</span>
</strong>
% endif
<br />
% if data['audio_decision'] == 'direct play':
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% endif
% elif data['media_type'] == 'episode' or data['media_type'] == 'movie' or data['media_type'] == 'clip':
% if data['video_decision'] == 'direct play' and data['audio_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['video_decision'] == 'copy' and data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
Transcoding
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
</strong>
% elif data['video_decision'] == 'copy' or data['audio_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>Direct Play</strong>
% endif
<br />
% if data['video_decision'] and data['media_type'] != 'photo':
% if data['video_decision'] == 'transcode':
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
% elif data['video_decision'] == 'copy':
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
% else:
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
% endif
</span>
</strong>
% endif
<br />
% if data['video_decision'] == 'direct play':
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
% elif data['video_decision'] == 'copy':
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
% elif data['video_decision'] == 'transcode':
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
% endif
<br />
% if data['audio_decision'] == 'direct play':
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% endif
% elif data['media_type'] == 'photo':
% if data['video_decision'] == 'direct play':
Stream <strong>Direct Play</strong>
% elif data['video_decision'] == 'copy':
Stream <strong>Direct Stream</strong>
% else:
Stream <strong>
<span id="transcode-state-${data['session_key']}">
(Speed: ${data['transcode_speed']})
% if data['throttled'] == '1':
(Throttled)
<br />
% endif
% if data['audio_decision']:
% if data['audio_decision'] == 'transcode':
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% elif data['audio_decision'] == 'copy':
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
% else:
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
% endif
</span>
</strong>
% endif
% endif
<br>
% endif
</span>
</div>
</div>
% if data['media_type'] != 'photo':
data/interfaces/default/fonts/Open-Sans-600/LICENSE.txt (new file): 202 lines

@@ -0,0 +1,202 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

[Remainder of the file: the standard Apache License, Version 2.0 text; the captured diff ends partway through Section 8, "Limitation of Liability".]
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
BIN   data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.eot (new file; binary file not shown)
1637  data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.svg (new file; diff suppressed because it is too large, 104 KiB)
BIN   data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.ttf (new file; binary file not shown)
BIN   data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff (new file; binary file not shown)
BIN   data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff2 (new file; binary file not shown)
202   data/interfaces/default/fonts/Open-Sans-regular/LICENSE.txt (new file)
@@ -0,0 +1,202 @@
(Apache License, Version 2.0, January 2004; the full 202-line text is identical to the license reproduced above.)
Binary file not shown.
File diff suppressed because it is too large (105 KiB).
Binary files not shown (3 files).
@@ -341,6 +341,14 @@

var music_visible = (${config['music_logging_enable']} == 1 ? true : false);

function dataSecondsToHours(data) {
$.each(data.series, function (i, series) {
series.data = $.map(series.data, function (value) {
return value / 60 / 60;
});
});
}

function loadGraphsTab1(time_range, yaxis) {
$('#days-selection').show();
@@ -354,18 +362,19 @@
dataType: "json",
success: function(data) {
var dateArray = [];
for (var i = 0; i < data.categories.length; i++) {
dateArray.push(moment(data.categories[i], 'YYYY-MM-DD').valueOf());
$.each(data.categories, function (i, day) {
dateArray.push(moment(day, 'YYYY-MM-DD').valueOf());
// Highlight the weekend
if ((moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sat') ||
(moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sun')) {
if ((moment(day, 'YYYY-MM-DD').format('ddd') == 'Sat') ||
(moment(day, 'YYYY-MM-DD').format('ddd') == 'Sun')) {
hc_plays_by_day_options.xAxis.plotBands.push({
from: i-0.5,
to: i+0.5,
color: 'rgba(80,80,80,0.3)'
});
}
}
});
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_day_options.yAxis.min = 0;
hc_plays_by_day_options.xAxis.categories = dateArray;
hc_plays_by_day_options.series = data.series;
@@ -380,6 +389,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_dayofweek_options.xAxis.categories = data.categories;
hc_plays_by_dayofweek_options.series = data.series;
hc_plays_by_dayofweek_options.series[2].visible = music_visible;
@@ -393,6 +403,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_hourofday_options.xAxis.categories = data.categories;
hc_plays_by_hourofday_options.series = data.series;
hc_plays_by_hourofday_options.series[2].visible = music_visible;
@@ -406,6 +417,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_platform_options.xAxis.categories = data.categories;
hc_plays_by_platform_options.series = data.series;
hc_plays_by_platform_options.series[2].visible = music_visible;
@@ -419,6 +431,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_user_options.xAxis.categories = data.categories;
hc_plays_by_user_options.series = data.series;
hc_plays_by_user_options.series[2].visible = music_visible;
@@ -439,18 +452,19 @@
dataType: "json",
success: function(data) {
var dateArray = [];
for (var i = 0; i < data.categories.length; i++) {
dateArray.push(moment(data.categories[i], 'YYYY-MM-DD').valueOf());
$.each(data.categories, function (i, day) {
dateArray.push(moment(day, 'YYYY-MM-DD').valueOf());
// Highlight the weekend
if ((moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sat') ||
(moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sun')) {
hc_plays_by_stream_type_options.xAxis.plotBands.push({
if ((moment(day, 'YYYY-MM-DD').format('ddd') == 'Sat') ||
(moment(day, 'YYYY-MM-DD').format('ddd') == 'Sun')) {
hc_plays_by_stream_type_options.xAxis.plotBands.push({
from: i-0.5,
to: i+0.5,
color: 'rgba(80,80,80,0.3)'
});
}
}
});
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_stream_type_options.yAxis.min = 0;
hc_plays_by_stream_type_options.xAxis.categories = dateArray;
hc_plays_by_stream_type_options.series = data.series;
@@ -464,6 +478,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_source_resolution_options.xAxis.categories = data.categories;
hc_plays_by_source_resolution_options.series = data.series;
var hc_plays_by_source_resolution = new Highcharts.Chart(hc_plays_by_source_resolution_options);
@@ -476,6 +491,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_stream_resolution_options.xAxis.categories = data.categories;
hc_plays_by_stream_resolution_options.series = data.series;
var hc_plays_by_stream_resolution = new Highcharts.Chart(hc_plays_by_stream_resolution_options);
@@ -488,6 +504,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_platform_by_stream_type_options.xAxis.categories = data.categories;
hc_plays_by_platform_by_stream_type_options.series = data.series;
var hc_plays_by_platform_by_stream_type = new Highcharts.Chart(hc_plays_by_platform_by_stream_type_options);
@@ -500,6 +517,7 @@
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_user_by_stream_type_options.xAxis.categories = data.categories;
hc_plays_by_user_by_stream_type_options.series = data.series;
var hc_plays_by_user_by_stream_type = new Highcharts.Chart(hc_plays_by_user_by_stream_type_options);
@@ -518,6 +536,7 @@
data: { y_axis: yaxis, user_id: selected_user_id },
dataType: "json",
success: function(data) {
if (yaxis === 'duration') { dataSecondsToHours(data); }
hc_plays_by_month_options.yAxis.min = 0;
hc_plays_by_month_options.xAxis.categories = data.categories;
hc_plays_by_month_options.series = data.series;
@@ -610,56 +629,55 @@
if (type === 'plays') {
yaxis_format = function() { return this.value; };
tooltip_format = function() {
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
var s = '<b>'+ moment(this.x).format("ddd MMM D") +'</b>';
} else {
var s = '<b>'+ this.x +'</b>';
}
if (this.points.length > 1) {
var total = 0;
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+point.y;
total += point.y;
});
s += '<br><b>Total: '+total+'</b>';
} else {
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+point.y;
});
}
return s;
}
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
var s = '<b>'+ moment(this.x).format('ddd MMM D') +'</b>';
} else {
var s = '<b>'+ this.x +'</b>';
}
if (this.points.length > 1) {
var total = 0;
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+point.y;
total += point.y;
});
s += '<br><b>Total: '+total+'</b>';
} else {
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+point.y;
});
}
return s;
}
stack_labels_format = function() {
return this.total;
}

return this.total;
}
$('.yaxis-text').html('Play count');
} else {
yaxis_format = function() { return moment.duration(this.value, 'seconds').format("H [h] m [m]"); };
yaxis_format = function() { return moment.duration(this.value, 'hours').format('H [h] m [m]'); };
tooltip_format = function() {
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
var s = '<b>'+ moment(this.x).format("ddd MMM D") +'</b>';
} else {
var s = '<b>'+ this.x +'</b>';
}
if (this.points.length > 1) {
var total = 0;
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'seconds').format('D [days] H [hrs] m [mins]');
total += point.y;
});
s += '<br/><b>Total: '+moment.duration(total, 'seconds').format('D [days] H [hrs] m [mins]')+'</b>';
} else {
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'seconds').format('D [days] H [hrs] m [mins]');
});
}
return s;
}
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
var s = '<b>'+ moment(this.x).format('ddd MMM D') +'</b>';
} else {
var s = '<b>'+ this.x +'</b>';
}
if (this.points.length > 1) {
var total = 0;
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'hours').format('D [days] H [hrs] m [mins]');
total += point.y;
});
s += '<br/><b>Total: '+moment.duration(total, 'hours').format('D [days] H [hrs] m [mins]')+'</b>';
} else {
$.each(this.points, function(i, point) {
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'hours').format('D [days] H [hrs] m [mins]');
});
}
return s;
}
stack_labels_format = function() {
var s = moment.duration(this.total, 'seconds').format("H [hrs] m [mins]");
return s;
}
var s = moment.duration(this.total, 'hours').format('H [h] m [m]');
return s;
}
$('.yaxis-text').html('Play duration');
}
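The duration change above works in two matching halves: dataSecondsToHours() divides every series value by 3600 before the charts are built, and the duration tooltips and stack labels then format those values with moment.duration(value, 'hours') instead of 'seconds'. A minimal sketch of the pairing, assuming the moment-duration-format plugin that the existing format() calls already rely on:

    // Raw play duration from the API, in seconds.
    var seconds = 5400;
    // dataSecondsToHours() reduces it to hours before charting...
    var hours = seconds / 60 / 60;                               // 1.5
    // ...so the labels must name the converted unit when formatting.
    moment.duration(hours, 'hours').format('H [hrs] m [mins]');  // "1 hrs 30 mins"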
@@ -103,6 +103,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
@@ -149,6 +152,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -199,6 +205,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['grandparent_thumb'] != '':
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
@@ -241,6 +250,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -295,6 +307,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
@@ -341,6 +356,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -391,6 +409,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
@@ -433,6 +454,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -485,18 +509,21 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['rating_key']:
<a href="info?rating_key=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
% if top_stat['rows'][0]['grandparent_thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
</a>
% else:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
% if len(top_stat['rows']) > 1:
@@ -531,18 +558,21 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['rating_key']:
<a href="info?rating_key=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
</a>
% else:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
<div class="home-platforms-instance-list-number">
@@ -581,18 +611,21 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['rating_key']:
<a href="info?rating_key=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
% if top_stat['rows'][0]['grandparent_thumb'] != '':
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
</a>
% else:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-cover">
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
% if len(top_stat['rows']) > 1:
@@ -623,18 +656,21 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['rating_key']:
<a href="info?rating_key=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
% if top_stat['rows'][loop.index]['grandparent_thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
</a>
% else:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
<div class="home-platforms-instance-list-cover">
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
</div>
% endif
<div class="home-platforms-instance-list-number">
@@ -847,6 +883,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][0]['thumb']:
<div class="home-platforms-instance-poster">
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-poster">
@@ -903,6 +942,9 @@ DOCUMENTATION :: END
% if top_stat['rows'][loop.index]['thumb']:
<div class="home-platforms-instance-list-poster">
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
% else:
<div class="home-platforms-instance-list-poster">
@@ -103,10 +103,20 @@
type: 'GET',
cache: false,
async: true,
error: function (xhr, status, error) {
console.log(status + ': ' + error);
},
complete: function (xhr, status) {
$('#dashboard-checking-activity').remove();

var current_activity = $.parseJSON(xhr.responseText);
var current_activity;
try {
current_activity = $.parseJSON(xhr.responseText);
} catch (e) {
console.log(status + ': ' + e);
current_activity = null;
}

if (!(current_activity)) {
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.</div>');
return
@@ -159,10 +169,34 @@
}

// if transcoding, update the transcode state
var ts = '';
if (s.video_decision == 'transcode' || s.audio_decision == 'transcode') {
var throttled = (s.throttled == '1') ? ' (Throttled)' : '';
$('#transcode-state-' + key).html('(Speed: ' + s.transcode_speed + ')' + throttled);
ts += 'Stream <strong>Transcode (Speed: ' + s.transcode_speed + ')' + throttled + '</strong><br>';
} else if (s.video_decision == 'copy' || s.audio_decision == 'copy') {
ts += 'Stream <strong>Direct Stream</strong><br>';
} else {
ts += 'Stream <strong>Direct Play</strong><br>';
}
if (s.video_decision != '' && s.media_type != 'photo') {
if (s.video_decision == 'transcode') {
ts += 'Video <strong>Transcode (' + s.transcode_video_codec + ') (' + s.transcode_width + 'x' + s.transcode_height + ')</strong><br>';
} else if (s.video_decision == 'copy') {
ts += 'Video <strong>Direct Stream (' + s.transcode_video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
} else {
ts += 'Video <strong>Direct Play (' + s.video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
}
}
if (s.audio_decision != '') {
if (s.audio_decision == 'transcode') {
ts += 'Audio <strong>Transcode (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
} else if (s.audio_decision == 'copy') {
ts += 'Audio <strong>Direct Stream (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
} else {
ts += 'Audio <strong>Direct Play (' + s.audio_codec + ') (' + s.audio_channels + 'ch)</strong>';
}
}
$('#transcode-state-' + key).html(ts);

// update the stream progress times
$('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
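The transcode-state block above builds the same three-way label (Transcode, Direct Stream, Direct Play) three times, once each for the stream, video, and audio decisions. Purely as an illustration of that mapping, and not part of the change itself, the branching could be expressed once as a helper; decisionLabel is a hypothetical name:

    // Hypothetical helper: 'transcode' -> Transcode, 'copy' -> Direct Stream,
    // anything else -> Direct Play, mirroring the branches above.
    function decisionLabel(decision) {
        if (decision === 'transcode') { return 'Transcode'; }
        if (decision === 'copy') { return 'Direct Stream'; }
        return 'Direct Play';
    }
    // e.g. ts += 'Video <strong>' + decisionLabel(s.video_decision) + '</strong><br>';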
@@ -68,6 +68,9 @@ DOCUMENTATION :: END
<div class="container-fluid">
<div class="row">
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['art']}&width=1920&height=1080)"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="summary-container">
<div class="summary-navbar">
<div class="col-md-12">
@@ -119,18 +122,27 @@ DOCUMENTATION :: END
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% elif data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
<div class="summary-poster-face-track" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=500&fallback=cover);">
<div class="summary-poster-face-overlay">
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% else:
<div class="summary-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=300&height=450&fallback=poster);">
<div class="summary-poster-face-overlay">
<span></span>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
% endif
</a>
</div>
@@ -51,6 +51,9 @@ DOCUMENTATION :: END
</div>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
</a>
% elif data['children_type'] == 'episode':
@@ -63,6 +66,9 @@ DOCUMENTATION :: END
</div>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
</a>
<div class="item-children-instance-text-wrapper episode-item">
@@ -74,6 +80,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" title="${child['title']}">
<div class="item-children-poster">
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
</a>
<div class="item-children-instance-text-wrapper album-item">
@@ -65,6 +65,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper season-item">
<h3 title="${child['title']}">${child['title']}</h3>
@@ -87,6 +90,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper season-item">
<h3 title="${child['title']}">${child['title']}</h3>
@@ -109,6 +115,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper season-item">
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
@@ -131,6 +140,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face episode-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=500&height=250&fallback=art);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper episode-item">
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
@@ -154,6 +166,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper album-item">
<h3 title="${child['title']}">${child['title']}</h3>
@@ -175,6 +190,9 @@ DOCUMENTATION :: END
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
<div class="item-children-poster">
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
</div>
<div class="item-children-instance-text-wrapper album-item">
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
@@ -204,6 +222,9 @@ DOCUMENTATION :: END
</div>
</div>
</div>
% if _session['user_group'] == 'admin':
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
% endif
<div class="item-children-instance-text-wrapper album-item">
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
<h3 title="${child['title']}">${child['title']}</h3>
@@ -3,37 +3,56 @@
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
<h4 class="modal-title" id="myModalLabel">
% if data:
<strong><span id="modal_header_ip_address">
<i class="fa fa-spin fa-refresh"></i> Loading Details...
% if data:
<i class="fa fa-map-marker"></i> IP Address: ${data}
% else:
<i class="fa fa-exclamation-circle"></i> Invalid IP Address
% endif
</span></strong>
% else:
<i class="fa fa-exclamation-circle"></i> Invalid IP Address</span></strong>
% endif
</h4>
</div>
<div class="modal-body" id="modal-text">
<div class="col-sm-12">
<h4><strong>Location Details</strong><span id="ip_loading" style="padding-left: 5px;"><i class="fa fa-refresh fa-spin"></i></span></h4>
</div>
<div id="ip_error" class="col-sm-12 text-muted"></div>
<div class="col-sm-6">
<h4><strong>Location Details</strong></h4>
<ul class="list-unstyled">
<li>Continent: <strong><span id="continent"></span></strong></li>
<li>Country: <strong><span id="country"></span></strong></li>
<li>Region: <strong><span id="region"></span></strong></li>
<li>City: <strong><span id="city"></span></strong></li>
<li>Timezone: <strong><span id="timezone"></span></strong></li>
<li>Latitude: <strong><span id="lat"></span></strong></li>
<li>Longitude: <strong><span id="lon"></span></strong></li>
<li>Postal Code: <strong><span id="postal_code"></span></strong></li>
</ul>
</div>
<div class="col-sm-6">
<h4><strong>Connection Details</strong></h4>
<ul class="list-unstyled">
<li>Organization: <strong><span id="organization"></span></strong></li>
<li>Timezone: <strong><span id="timezone"></span></strong></li>
<li>Latitude: <strong><span id="latitude"></span></strong></li>
<li>Longitude: <strong><span id="longitude"></span></strong></li>
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
</ul>
</div>
<div class="col-sm-12">
<h4><strong>Connection Details</strong><span id="isp_loading" style="padding-left: 5px;"><i class="fa fa-refresh fa-spin"></i></span></h4>
</div>
<div id="isp_error" class="col-sm-12 text-muted"></div>
<div class="col-sm-12">
<ul class="list-unstyled">
<li>Host: <strong><span id="isp_host"></span></strong></li>
</ul>
</div>
<div class="col-sm-6" id="isp_instance">
<ul class="list-unstyled">
<li>ISP: <strong><span id="isp_name"></span></strong></li>
<li>Address: <strong><span id="isp_address"></span></strong></li>
</ul>
</div>
</div>
<div class="modal-footer">
<% from plexpy.helpers import anon_url %>
<span class="text-muted">Telize service written by <a href="${anon_url('https://github.com/fcambus/telize')}" target="_blank">Frederic Cambus</a>.</span>
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
</div>
</div>
</div>
@@ -42,27 +61,79 @@
<script>
function getUserLocation(ip_address) {
$.ajax({
url: 'https://telize.myhtpc.co.za/geoip/' + ip_address,
url: 'get_geoip_lookup',
type: 'GET',
data: { ip_address: ip_address },
cache: true,
async: true,
type: 'GET',
dataType: 'json',
error: function(){
$('#modal_header_ip_address').html("Request failed. Server may be too busy.");
complete: function () {
$('#ip_loading').remove();
},
success: function(data) {
$('#modal_header_ip_address').html('<i class="fa fa-map-marker"></i> IP Address: ' + ip_address);
$('#country').html(data.country);
$('#city').html(data.city);
$('#region').html(data.region);
$('#timezone').html(data.timezone);
$('#lat').html(data.latitude);
$('#lon').html(data.longitude);
$('#organization').html(data.organization);
error: function () {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
},
timeout: 5000
success: function (data) {
if ('error' in data) {
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
} else {
$('#continent').html(data.continent);
$('#country').html(data.country);
$('#region').html(data.region);
$('#city').html(data.city);
$('#postal_code').html(data.postal_code);
$('#timezone').html(data.timezone);
$('#latitude').html(data.latitude);
$('#longitude').html(data.longitude);
$('#accuracy').html(data.accuracy + ' km');
}
}
});
}

function getUserConnection(ip_address) {
$.ajax({
url: 'get_whois_lookup',
type: 'GET',
data: { ip_address: ip_address },
cache: true,
async: true,
complete: function () {
$('#isp_loading').remove();
},
error: function () {
$('#isp_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
},
success: function (data) {
$('#isp_host').html(data.host);
if ('error' in data) {
$('#isp_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
} else if (data.nets.length) {
$('#isp_instance').remove();
$.each(data.nets, function (index, net) {
var s = '';
if (net.city || net.state || net.postal_code) {
s = (net.city && net.state) ? net.city + ', ' + net.state : net.city || net.state || '';
s = (s && net.postal_code) ? s + ' ' + net.postal_code : s || net.postal_code || '';
}
s = (s) ? '<strong>' + s + '</strong><br />' : s;
$('#modal-text').append('<div class="col-sm-6"> \
<ul class="list-unstyled"> \
<li>ISP: <strong>' + net.description + '</strong></li> \
<li><span style="float: left;">Address: </span> \
<span style="float: left;"><strong>' + net.address + '</strong><br />' + s +
'<strong>' + net.country + '</strong></span> \
</li> \
</ul> \
</div>')
});
} else {
$('#isp_name, #isp_address').html("Not available");
}
}
});
}

getUserLocation('${data}');
getUserConnection('${data}');
</script>
% endif
@@ -54,9 +54,35 @@ function showMsg(msg, loader, timeout, ms, error) {
|
||||
}
|
||||
}
|
||||
|
||||
function doAjaxCall(url, elem, reload, form, callback) {
|
||||
function confirmAjaxCall(url, msg, loader_msg, callback) {
$("#confirm-message").html(msg);
$('#confirm-modal').modal();
$('#confirm-modal').one('click', '#confirm-button', function () {
if (loader_msg) {
showMsg(loader_msg, true, false)
}
$.ajax({
url: url,
type: 'POST',
complete: function (xhr, status) {
var result = $.parseJSON(xhr.responseText);
var msg = result.message;
if (result.result == 'success') {
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
} else {
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
}
if (typeof callback === "function") {
callback();
}
}
});
});
}
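
A minimal usage sketch of confirmAjaxCall as defined above; the endpoint, id, and message strings are hypothetical placeholders for illustration, not part of this changeset:

// Prompt for confirmation, then POST to a hypothetical endpoint and reload the page when done.
confirmAjaxCall(
    'delete_all_user_history?user_id=123456',         // hypothetical endpoint and id
    'Are you sure you want to delete all history for this user?',
    'Deleting history...',
    function () { location.reload(); }                 // optional callback, runs after the result message is shown
);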
|
||||
|
||||
function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
// Set Message
|
||||
feedback = $("#ajaxMsg");
|
||||
feedback = (showMsg) ? $("#ajaxMsg") : $();
|
||||
update = $("#updatebar");
|
||||
if (update.is(":visible")) {
|
||||
var height = update.height() + 35;
|
||||
@@ -399,4 +425,44 @@ window.onerror = function (message, file, line) {
|
||||
'line': line
|
||||
};
|
||||
$.post("log_js_errors", e, function (data) { });
|
||||
};
|
||||
};
|
||||
|
||||
$('*').on('click', '.refresh_pms_image', function (e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
var background_div = $(this).parent().siblings(['style*=pms_image_proxy']).first();
|
||||
var pms_proxy_url = background_div.css('background-image');
|
||||
pms_proxy_url = /^url\((['"]?)(.*)\1\)$/.exec(pms_proxy_url);
|
||||
pms_proxy_url = pms_proxy_url ? pms_proxy_url[2] : ""; // If matched, retrieve url, otherwise ""
|
||||
|
||||
if (pms_proxy_url.indexOf('pms_image_proxy') == -1) {
|
||||
console.log('PMS image proxy url not found.');
|
||||
} else {
|
||||
if (pms_proxy_url.indexOf('refresh=true') > -1) {
|
||||
pms_proxy_url = pms_proxy_url.replace("&refresh=true", "");
|
||||
console.log(pms_proxy_url)
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + ')');
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
|
||||
} else {
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Taken from http://stackoverflow.com/questions/10420352/converting-file-size-in-bytes-to-human-readable#answer-14919494
function humanFileSize(bytes, si) {
var thresh = si ? 1000 : 1024;
if (Math.abs(bytes) < thresh) {
return bytes + ' B';
}
var units = si
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
var u = -1;
do {
bytes /= thresh;
++u;
} while (Math.abs(bytes) >= thresh && u < units.length - 1);
return bytes.toFixed(1) + ' ' + units[u];
}
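
A quick sanity check of the helper above, assuming it is loaded on the page (binary units by default, SI units when si is truthy):

console.log(humanFileSize(500));             // "500 B"   (below the 1024 threshold, returned as-is)
console.log(humanFileSize(1536));            // "1.5 KiB" (binary units)
console.log(humanFileSize(1536, true));      // "1.5 kB"  (SI units, 1000 threshold)
console.log(humanFileSize(1073741824));      // "1.0 GiB"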
@@ -35,7 +35,11 @@ history_table_options = {
|
||||
"targets": [0],
|
||||
"data": null,
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
|
||||
if (rowData['id'] === null) {
|
||||
$(td).html('');
|
||||
} else {
|
||||
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
|
||||
}
|
||||
},
|
||||
"width": "5%",
|
||||
"className": "delete-control no-wrap hidden",
|
||||
@@ -46,14 +50,21 @@ history_table_options = {
|
||||
"targets": [1],
|
||||
"data":"date",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (rowData['stopped'] === null) {
|
||||
$(td).html('Currently watching...');
|
||||
var date = moment(cellData, "X").format(date_format);
|
||||
if (rowData['state'] !== null) {
|
||||
var state = '';
|
||||
if (rowData['state'] === 'playing') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Playing"><i class="fa fa-play fa-fw"></i></span>';
|
||||
} else if (rowData['state'] === 'paused') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Paused"><i class="fa fa-pause fa-fw"></i></span>';
|
||||
} else if (rowData['state'] === 'buffering') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Buffering"><i class="fa fa-spinner fa-fw"></i></span>';
|
||||
}
|
||||
$(td).html('<div><div style="float: left;">' + state + ' ' + date + '</div></div>');
|
||||
} else if (rowData['group_count'] > 1) {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
expand_history = '<span class="expand-history-tooltip" data-toggle="tooltip" title="Show Detailed History"><i class="fa fa-plus-circle fa-fw"></i></span>';
|
||||
$(td).html('<div><a href="#"><div style="float: left;">' + expand_history + ' ' + date + '</div></a></div>');
|
||||
} else {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
$(td).html('<div style="float: left;"><i class="fa fa-fw"></i> ' + date + '</div>');
|
||||
}
|
||||
},
|
||||
@@ -138,21 +149,22 @@ history_table_options = {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = '';
|
||||
var source = (rowData['state'] === null) ? 'source=history&' : '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=cover" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
}
|
||||
@@ -241,6 +253,7 @@ history_table_options = {
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('.current-activity-tooltip').tooltip({ container: 'body' });
|
||||
$('.expand-history-tooltip').tooltip({ container: 'body' });
|
||||
$('.external-ip-tooltip').tooltip({ container: 'body' });
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
@@ -286,7 +299,7 @@ history_table_options = {
|
||||
if ($.inArray(rowData['id'], history_to_delete) !== -1) {
|
||||
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
|
||||
}
|
||||
} else {
|
||||
} else if (rowData['id'] !== null) {
|
||||
// if grouped rows
|
||||
// toggle the parent button to danger
|
||||
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
|
||||
@@ -306,6 +319,9 @@ history_table_options = {
|
||||
history_table.row(row).child(childTableFormat(rowData)).show();
|
||||
}
|
||||
|
||||
if (rowData['state'] !== null) {
|
||||
$(row).addClass('current-activity-row');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,7 +334,11 @@ $('.history_table').on('click', '> tbody > tr > td.modal-control', function () {
|
||||
function showStreamDetails() {
|
||||
$.ajax({
|
||||
url: 'get_stream_data',
|
||||
data: {row_id: rowData['id'], user: rowData['friendly_name']},
|
||||
data: {
|
||||
row_id: rowData['id'],
|
||||
session_key: rowData['session_key'],
|
||||
user: rowData['friendly_name']
|
||||
},
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
|
@@ -4,7 +4,7 @@ var time_format = 'hh:mm a';
|
||||
$.ajax({
|
||||
url: 'get_date_formats',
|
||||
type: 'GET',
|
||||
success: function(data) {
|
||||
success: function (data) {
|
||||
date_format = data.date_format;
|
||||
time_format = data.time_format;
|
||||
}
|
||||
@@ -16,10 +16,10 @@ media_info_table_options = {
|
||||
"destroy": true,
|
||||
"language": {
|
||||
"search": "Search: ",
|
||||
"lengthMenu":"Show _MENU_ entries per page",
|
||||
"info":"Showing _START_ to _END_ of _TOTAL_ library items",
|
||||
"infoEmpty":"Showing 0 to 0 of 0 entries",
|
||||
"infoFiltered":"<span class='hidden-md hidden-sm hidden-xs'>(filtered from _MAX_ total entries)</span>",
|
||||
"lengthMenu": "Show _MENU_ entries per page",
|
||||
"info": "Showing _START_ to _END_ of _TOTAL_ library items",
|
||||
"infoEmpty": "Showing 0 to 0 of 0 entries",
|
||||
"infoFiltered": "<span class='hidden-md hidden-sm hidden-xs'>(filtered from _MAX_ total entries)</span>",
|
||||
"emptyTable": "No data in table",
|
||||
"loadingRecords": '<i class="fa fa-refresh fa-spin"></i> Loading items...</div>'
|
||||
},
|
||||
@@ -28,7 +28,7 @@ media_info_table_options = {
|
||||
"processing": false,
|
||||
"serverSide": true,
|
||||
"pageLength": 25,
|
||||
"order": [ 1, 'asc'],
|
||||
"order": [1, 'asc'],
|
||||
"autoWidth": false,
|
||||
"scrollX": true,
|
||||
"columnDefs": [
|
||||
@@ -110,7 +110,7 @@ media_info_table_options = {
|
||||
},
|
||||
"width": "20%",
|
||||
"className": "no-wrap",
|
||||
},
|
||||
},
|
||||
{
|
||||
"targets": [2],
|
||||
"data": "container",
|
||||
@@ -194,7 +194,7 @@ media_info_table_options = {
|
||||
"data": "file_size",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
$(td).html(Math.round(cellData / Math.pow(1024, 2)).toString() + ' MiB');
|
||||
$(td).html(humanFileSize(cellData));
|
||||
} else {
|
||||
if (rowData['section_type'] != 'photo' && get_file_sizes != null) {
|
||||
get_file_sizes = true;
|
||||
@@ -280,10 +280,10 @@ media_info_table_options = {
|
||||
}
|
||||
|
||||
$("#media_info_table-SID-" + section_id + "_info").append('<span class="hidden-md hidden-sm hidden-xs"> with a total file size of ' +
|
||||
Math.round(settings.json.filtered_file_size / Math.pow(1024, 3)).toString() + ' GiB' +
|
||||
' (filtered from ' + Math.round(settings.json.total_file_size / Math.pow(1024, 3)).toString() + ' GiB)</span>');
|
||||
humanFileSize(settings.json.filtered_file_size) +
|
||||
' (filtered from ' + humanFileSize(settings.json.total_file_size) + ')</span>');
|
||||
},
|
||||
"preDrawCallback": function(settings) {
|
||||
"preDrawCallback": function (settings) {
|
||||
var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
|
||||
showMsg(msg, false, false, 0)
|
||||
},
|
||||
@@ -425,17 +425,17 @@ function childTableFormatMedia(rowData) {
|
||||
'<table id="media_info_child-' + rowData['rating_key'] + '" data-id="' + rowData['rating_key'] + '" width="100%">' +
|
||||
'<thead>' +
|
||||
'<tr>' +
|
||||
'<th align="left" id="added_at">Added At</th>' +
|
||||
'<th align="left" id="title">Title</th>' +
|
||||
'<th align="left" id="container">Container</th>' +
|
||||
'<th align="left" id="bitrate">Bitrate</th>' +
|
||||
'<th align="left" id="video_codec">Video Codec</th>' +
|
||||
'<th align="left" id="video_resolution">Video Resolution</th>' +
|
||||
'<th align="left" id="video_resolution">Video Framerate</th>' +
|
||||
'<th align="left" id="audio_codec">Audio Codec</th>' +
|
||||
'<th align="left" id="audio_channels">Audio Channels</th>' +
|
||||
'<th align="left" id="file_size">File Size</th>' +
|
||||
'<th align="left" id="last_played">Last Played</th>' +
|
||||
'<th align="left" id="added_at">Added At</th>' +
|
||||
'<th align="left" id="title">Title</th>' +
|
||||
'<th align="left" id="container">Container</th>' +
|
||||
'<th align="left" id="bitrate">Bitrate</th>' +
|
||||
'<th align="left" id="video_codec">Video Codec</th>' +
|
||||
'<th align="left" id="video_resolution">Video Resolution</th>' +
|
||||
'<th align="left" id="video_resolution">Video Framerate</th>' +
|
||||
'<th align="left" id="audio_codec">Audio Codec</th>' +
|
||||
'<th align="left" id="audio_channels">Audio Channels</th>' +
|
||||
'<th align="left" id="file_size">File Size</th>' +
|
||||
'<th align="left" id="last_played">Last Played</th>' +
|
||||
'<th align="left" id="total_plays">Total Plays</th>' +
|
||||
'</tr>' +
|
||||
'</thead>' +
|
||||
|
@@ -39,6 +39,9 @@ DOCUMENTATION :: END
|
||||
<div class="row">
|
||||
% if data['library_art']:
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['library_art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
@@ -362,7 +365,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_library_watch_time_stats',
|
||||
url: 'library_watch_time_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -372,7 +375,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate user stats
|
||||
$.ajax({
|
||||
url: 'get_library_user_stats',
|
||||
url: 'library_user_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -498,7 +501,7 @@ DOCUMENTATION :: END
|
||||
function recentlyWatched() {
|
||||
// Populate recently watched
|
||||
$.ajax({
|
||||
url: 'get_library_recently_watched',
|
||||
url: 'library_recently_watched',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
@@ -514,7 +517,7 @@ DOCUMENTATION :: END
|
||||
function recentlyAdded() {
|
||||
// Populate recently added
|
||||
$.ajax({
|
||||
url: 'get_library_recently_added',
|
||||
url: 'library_recently_added',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
|
@@ -60,6 +60,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -21,7 +21,33 @@
|
||||
<span><i class="fa fa-list-alt"></i> Logs</span>
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download log</button>
|
||||
<div class="btn-group" id="plexpy-log-levels">
|
||||
<label>
|
||||
<select name="plexpy-log-level-filter" id="plexpy-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARNING">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<div class="btn-group" id="plex-log-levels" style="display: none;">
|
||||
<label>
|
||||
<select name="plex-log-level-filter" id="plex-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARN">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexserverlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexscannerlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="clear-logs"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-notify-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-login-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
@@ -40,27 +66,25 @@
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-1">
|
||||
<table class="display" id="log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-2">
|
||||
<table class="display" id="plex_log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-3">
|
||||
@@ -114,7 +138,8 @@
|
||||
</div>
|
||||
|
||||
<br>
|
||||
<div align="center">Refresh rate:
|
||||
<div align="center">
|
||||
Refresh rate:
|
||||
<select id="refreshrate" onchange="setRefresh()">
|
||||
<option value="0" selected="selected">No Refresh</option>
|
||||
<option value="5">5 Seconds</option>
|
||||
@@ -139,21 +164,62 @@
|
||||
<script>
|
||||
|
||||
$(document).ready(function() {
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
function loadPlexPyLogs() {
|
||||
var log_levels = ['DEBUG', 'INFO', 'WARN', 'ERROR'];
|
||||
|
||||
function bindLogLevelFilter() {
clearLogLevelFilter();
var log_level_column = this.api().column(1);
var select = $('#plex-log-level-filter');
select.on('change', function () {
var val = $.fn.dataTable.util.escapeRegex(
$(this).val()
);
var search_string = '';
var levelIndex = log_levels.indexOf(val);
if (levelIndex >= 0) {
search_string = '^' + log_levels
.slice(levelIndex)
.join('|') + '$';
}
log_level_column
.search(search_string, true, false)
.draw();
}).change();
}
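
For reference, the handler above maps the selected level to a cumulative regex over log_levels; a small trace of that construction, assuming the same ['DEBUG', 'INFO', 'WARN', 'ERROR'] array:

// Selecting 'WARN' gives levelIndex = 2, so the DataTables column search becomes '^WARN|ERROR$'.
// Selecting '' (All log levels) gives levelIndex = -1, leaving the search string empty and clearing the filter.
var levels = ['DEBUG', 'INFO', 'WARN', 'ERROR'];
console.log('^' + levels.slice(levels.indexOf('WARN')).join('|') + '$');  // "^WARN|ERROR$"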
|
||||
|
||||
function clearLogLevelFilter() {
|
||||
$('#plex-log-level-filter').off('change');
|
||||
}
|
||||
|
||||
var selected_log_level = null;
|
||||
function loadPlexPyLogs(selected_log_level) {
|
||||
log_table_options.ajax = {
|
||||
url: "getLog"
|
||||
url: "get_log",
|
||||
type: 'post',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d),
|
||||
log_level: selected_log_level
|
||||
};
|
||||
}
|
||||
}
|
||||
log_table = $('#log_table').DataTable(log_table_options);
|
||||
|
||||
$('#plexpy-log-level-filter').on('change', function () {
|
||||
selected_log_level = $(this).val() || null;
|
||||
log_table.draw();
|
||||
});
|
||||
}
|
||||
|
||||
function loadPlexLogs() {
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=server"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_log_table = $('#plex_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -161,6 +227,7 @@
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=scanner"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_scanner_log_table = $('#plex_scanner_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -190,17 +257,25 @@
|
||||
}
|
||||
|
||||
$("#plexpy-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").show();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").show();
|
||||
$("#download-plexpylog").show()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
$("#plex-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").show()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexLogs();
|
||||
@@ -208,8 +283,12 @@
|
||||
});
|
||||
|
||||
$("#plex-scanner-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").show()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexScannerLogs();
|
||||
@@ -217,8 +296,12 @@
|
||||
});
|
||||
|
||||
$("#notification-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").show();
|
||||
$("#clear-login-logs").hide();
|
||||
loadNotificationLogs();
|
||||
@@ -226,8 +309,12 @@
|
||||
});
|
||||
|
||||
$("#login-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").show();
|
||||
loadLoginLogs();
|
||||
@@ -263,6 +350,13 @@
|
||||
window.location.href = "download_log";
|
||||
});
|
||||
|
||||
$("#download-plexserverlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=server";
|
||||
});
|
||||
|
||||
$("#download-plexscannerlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=scanner";
|
||||
});
|
||||
|
||||
$("#clear-notify-logs").click(function () {
|
||||
$("#confirm-message").text("Are you sure you want to clear the PlexPy notification logs?");
|
||||
|
@@ -148,7 +148,7 @@
|
||||
|
||||
$('#save-notification-item').click(function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, true, reloadModal);
|
||||
return false;
|
||||
});
|
||||
|
||||
@@ -176,7 +176,7 @@
|
||||
})
|
||||
|
||||
$('#test_notifier').click(function () {
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, sendTestNotification);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, false, sendTestNotification);
|
||||
});
|
||||
|
||||
function sendTestNotification() {
|
||||
@@ -219,7 +219,7 @@
|
||||
|
||||
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder, #join_apikey').on('change', function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, false, reloadModal);
|
||||
return false;
|
||||
});
|
||||
|
||||
|
@@ -57,6 +57,20 @@
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a media item triggers the defined buffer threshold.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_concurrent" ${helpers.checked(data['on_concurrent'])} class="toggle-switches">
|
||||
Notify on user concurrent streams
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user has concurrent streams.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_newdevice" ${helpers.checked(data['on_newdevice'])} class="toggle-switches">
|
||||
Notify on user new device
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user streams from a new device.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_created" ${helpers.checked(data['on_created'])} class="toggle-switches">
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -69,6 +72,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -91,6 +97,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -4,7 +4,7 @@
|
||||
import sys
|
||||
|
||||
import plexpy
|
||||
from plexpy import common, logger, notifiers, versioncheck
|
||||
from plexpy import common, notifiers, versioncheck
|
||||
from plexpy.helpers import anon_url
|
||||
|
||||
available_notification_agents = sorted(notifiers.available_notification_agents(), key=lambda k: k['name'])
|
||||
@@ -62,78 +62,10 @@
|
||||
<div class="padded-header">
|
||||
<h3>PlexPy Configuration</h3>
|
||||
</div>
|
||||
<table class="config-info-table small-muted">
|
||||
<tbody>
|
||||
% if plexpy.CURRENT_VERSION:
|
||||
<tr>
|
||||
<td>Git Branch:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/tree/%s' % plexpy.CONFIG.GIT_BRANCH)}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Git Commit Hash:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/commit/%s' % plexpy.CURRENT_VERSION)}">${plexpy.CURRENT_VERSION}</a></td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Configuration File:</td>
|
||||
<td>${plexpy.CONFIG_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database File:</td>
|
||||
<td>${plexpy.DB_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Log File:</td>
|
||||
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(config['log_dir'], logger.FILENAME)}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Backup Directory:</td>
|
||||
<td>${config['backup_dir']}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Cache Directory:</td>
|
||||
<td>${config['cache_dir']}</td>
|
||||
</tr>
|
||||
% if plexpy.ARGS:
|
||||
<tr>
|
||||
<td>Arguments:</td>
|
||||
<td>${plexpy.ARGS}</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Platform:</td>
|
||||
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Python Version:</td>
|
||||
<td>${sys.version}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="top-line">Plex Forums:</td>
|
||||
<td class="top-line"><a class="no-highlight" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Source:</td>
|
||||
<td><a id="source-link" class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy')}" target="_blank">https://github.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Wiki:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/drzoidberg33/plexpy/wiki')}" target="_blank">https://github.com/drzoidberg33/plexpy/wiki</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Issues:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/drzoidberg33/plexpy/issues')}" data-id="issue">https://github.com/drzoidberg33/plexpy/issues</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Feature Requests:</td>
|
||||
<td><a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/drzoidberg33/plexpy')}" data-id="feature request">http://feathub.com/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Gitter Chat:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://gitter.im/drzoidberg33/plexpy')}" target="_blank">https://gitter.im/drzoidberg33/plexpy</a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<div id="plexpy-configuration-table">
|
||||
<div class='text-muted'><i class="fa fa-refresh fa-spin"></i> Loading configuration table...</div>
|
||||
<br>
|
||||
</div>
|
||||
<div class="padded-header">
|
||||
<h3>PlexPy Scheduled Tasks</h3>
|
||||
</div>
|
||||
@@ -191,6 +123,49 @@
|
||||
</label>
|
||||
<p class="help-block">Group successive play history by the same user as a single entry in the tables and watch statistics.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="history_table_activity" name="history_table_activity" value="1" ${config['history_table_activity']}> Current Activity in History Tables
|
||||
</label>
|
||||
<p class="help-block">Include current activity in the history tables. Statistics will not be counted until the stream has ended.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_watched_percent">Watched Percent</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="notify_watched_percent" name="notify_watched_percent" value="${config['notify_watched_percent']}" size="5" data-parsley-range="[50,95]" data-parsley-trigger="change" data-parsley-errors-container="#notify_watched_percent_error" required>
|
||||
</div>
|
||||
<div id="notify_watched_percent_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Set the percentage for a media item to be considered as watched. Minimum 50, Maximum 95.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Backup</h3>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="backup_interval">Backup Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="backup_interval" name="backup_interval" value="${config['backup_interval']}" size="5" data-parsley-range="[1,24]" data-parsley-trigger="change" data-parsley-errors-container="#backup_interval_error" required>
|
||||
</div>
|
||||
<div id="backup_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in hours) PlexPy will backup the database and configuration file. Minimum 1, maximum 24, default 6.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="backup_interval">Backup Days</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="backup_days" name="backup_days" value="${config['backup_days']}" size="5" data-parsley-min="1" data-parsley-trigger="change" data-parsley-errors-container="#backup_days_error" required>
|
||||
</div>
|
||||
<div id="backup_days_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
The number of days to keep scheduled backups. Minimum 1, default 3.<br />
|
||||
Note: Manual backups are not removed automatically.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Directories</h3>
|
||||
@@ -370,7 +345,7 @@
|
||||
</div>
|
||||
<div id="home_stats_count_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Max is 10 items, default is 5 items, 0 to disable.</p>
|
||||
<p class="help-block">Specify the number of items to show in the top lists for the watch statistics on the home page. Maximum 10 items, default 5 items, 0 to disable.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -566,7 +541,7 @@
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="input-group">
|
||||
<input class="form-control" type="text" name="api_key" id="api_key" value="${config['api_key']}" size="20">
|
||||
<input class="form-control" type="text" name="api_key" id="api_key" value="${config['api_key']}" size="20" disabled>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-form" type="button" id="generate_api">Generate</button>
|
||||
</span>
|
||||
@@ -581,15 +556,32 @@
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-5">
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">unknown</span></small></h3>
|
||||
<h3>Plex Media Server <small style="color: #fff;">Version <span id="pms_version">${config['pms_version']}</span></small></h3>
|
||||
</div>
|
||||
<p class="help-block">If you're using websocket monitoring, any server changes require a restart of PlexPy.</p>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="monitor_pms_updates" name="monitor_pms_updates" value="1" ${config['monitor_pms_updates']}> Monitor Plex Updates
|
||||
</label>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.<br />
|
||||
Note: The Plex updater is broken on certain Plex Pass version of Plex Media Server. PlexPy will automatically disable checking for Plex updates if one of these versions is found.</p>
|
||||
<p class="help-block">Enable to have PlexPy check if updates are available for the Plex Media Server.</p>
|
||||
</div>
|
||||
<div id="pms_update_options">
|
||||
<div class="form-group">
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<label for="pms_update_channel">Update Channel</label>
|
||||
<select class="form-control" id="pms_update_channel" name="pms_update_channel">
|
||||
<option value="public">Public</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="col-md-5">
|
||||
<label for="pms_update_distro_build">Release</label>
|
||||
<select class="form-control" id="pms_update_distro_build" name="pms_update_distro_build">
|
||||
</select>
|
||||
<input type="hidden" class="form-control" id="pms_update_distro" name="pms_update_distro">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -692,7 +684,7 @@
|
||||
</div>
|
||||
<div id="refresh_users_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in hours) PlexPy will request an updated friends list from Plex.tv. 1 minimum, 24 maximum.</p>
|
||||
<p class="help-block">The interval (in hours) PlexPy will request an updated friends list from Plex.tv. Minimum 1, maximum 24, default 12.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -712,7 +704,7 @@
|
||||
</div>
|
||||
<div id="refresh_libraries_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in hours) PlexPy will request an updated libraries list from your Plex Media Server. 1 minimum, 24 maximum.</p>
|
||||
<p class="help-block">The interval (in hours) PlexPy will request an updated libraries list from your Plex Media Server. Minimum 1, maximum 24, default 12.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -767,6 +759,20 @@
|
||||
</div>
|
||||
<p class="help-block">Backlink protection via anonymizer service, must end in "?".</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label>Flush Temporary Sessions</label>
<p class="help-block">
Attempt to fix history logging by flushing out all of the temporary sessions in the database.<br />
Warning: This will reset all currently active sessions. For emergency use only when history logging is stuck!
</p>
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="btn-group">
|
||||
<button class="btn btn-form" type="button" id="delete_temp_sessions">Flush</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Database Import Tool</h3>
|
||||
@@ -792,7 +798,7 @@
|
||||
</div>
|
||||
<div id="monitoring_interval_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Min 30 seconds, recommended 60 seconds.</p>
|
||||
<p class="help-block">The interval (in seconds) PlexPy will ping your Plex Server. Minimum 30 seconds, recommended 60 seconds.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
@@ -918,22 +924,28 @@
|
||||
<h3>Current Activity Notifications</h3>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="notify_watched_percent">Watched Percent</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="notify_watched_percent" name="notify_watched_percent" value="${config['notify_watched_percent']}" size="5" data-parsley-range="[50,95]" data-parsley-trigger="change" data-parsley-errors-container="#notify_watched_percent_error" required>
|
||||
</div>
|
||||
<div id="notify_watched_percent_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">Set the progress percentage of when a watched notification should be triggered. Minimum 50, Maximum 95.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_consecutive" id="notify_consecutive" value="1" ${config['notify_consecutive']}> Allow Consecutive Notifications
|
||||
</label>
|
||||
<p class="help-block">Disable to prevent consecutive notifications (i.e. both watched & stopped notifications).</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="notify_concurrent_by_ip" id="notify_concurrent_by_ip" value="1" ${config['notify_concurrent_by_ip']}> User Concurrent Streams Notifications by IP Address
|
||||
</label>
|
||||
<p class="help-block">Enable to only get notified of concurrent streams by a single user from different IP addresses.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_concurrent_threshold">User Concurrent Stream Threshold</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="notify_concurrent_threshold" name="notify_concurrent_threshold" value="${config['notify_concurrent_threshold']}" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#notify_concurrent_threshold_error" required>
|
||||
</div>
|
||||
<div id="notify_concurrent_threshold_error" class="alert alert-danger settings-alert" role="alert"></div>
|
||||
</div>
|
||||
<p class="help-block">The number of concurrent streams by a single user for PlexPy to trigger a notification. Minimum 2.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>Recently Added Notifications</h3>
|
||||
@@ -1078,6 +1090,40 @@
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-arrow-circle-o-right fa-fw"></i> User Concurrent Streams<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_concurrent_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_concurrent_subject_text" name="notify_on_concurrent_subject_text" value="${config['notify_on_concurrent_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_concurrent_body_text" name="notify_on_concurrent_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_concurrent_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<div class="link"><i class="fa fa-desktop fa-fw"></i> User New Device<i class="fa fa-chevron-down"></i></div>
|
||||
<ul class="submenu">
|
||||
<li>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_newdevice_subject_text">Subject Line</label>
|
||||
<input class="form-control" type="text" id="notify_on_newdevice_subject_text" name="notify_on_newdevice_subject_text" value="${config['notify_on_newdevice_subject_text']}" data-parsley-trigger="change" required>
|
||||
<p class="help-block">Set a custom subject line.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="notify_on_buffer_body_text">Message Body</label>
|
||||
<textarea class="form-control" id="notify_on_newdevice_body_text" name="notify_on_newdevice_body_text" data-parsley-trigger="change" data-autoresize required>${config['notify_on_newdevice_body_text']}</textarea>
|
||||
<p class="help-block">Set a custom body.</p>
|
||||
</div>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
<ul id="accordion-timeline" class="accordion list-unstyled">
|
||||
<li>
|
||||
@@ -1217,11 +1263,7 @@
|
||||
% else:
|
||||
<a href="javascript:void(0)" data-target="#notification-triggers-modal" data-id="${agent['id']}" class="toggle-notification-triggers-modal toggle-left" data-toggle="modal"><i class="fa fa-lg fa-bell"></i></a>
|
||||
% endif
|
||||
% if agent['id'] == 17:
|
||||
${agent['name']} <span style="color: #eb8600; padding-left: 10px;">[experimental]</span>
|
||||
% else:
|
||||
${agent['name']}
|
||||
% endif
|
||||
% if agent['has_config']:
|
||||
<a href="javascript:void(0)" rel="tooltip" data-target="#notification-config-modal" data-placement="top" title data-title="Open configuration" data-id="${agent['id']}" class="toggle-notification-config-modal toggle-right" data-toggle="modal"><i class="fa fa-lg fa-cog"></i></a>
|
||||
% endif
|
||||
@@ -1249,7 +1291,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Year
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1270,7 +1312,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Month
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1301,7 +1343,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Day of the Year
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1322,7 +1364,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Day of the Month
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1348,7 +1390,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Day of the Week
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1374,7 +1416,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Hour
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1405,7 +1447,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Minute
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1426,7 +1468,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Second
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1447,7 +1489,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
AM / PM
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1468,7 +1510,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Timezone
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1489,7 +1531,7 @@
|
||||
<table class="notification-params time-options">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="3">
|
||||
Timestamp
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1518,11 +1560,11 @@
|
||||
<div class="modal-body" id="modal-text">
|
||||
<div>
|
||||
<p class="help-block">
|
||||
This will attempt to fetch your token for you. This will not work on Internet Explorer 9 or lower.
|
||||
PlexPy does not store your username and password.
|
||||
This will attempt to fetch a new Plex.tv token for you. PlexPy does not store your username and password.
|
||||
Note: This will not work on Internet Explorer 9 or lower.
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<label for="pms_username">PMS Username</label>
|
||||
<label for="pms_username">Plex.tv Username</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="pms_username" name="pms_username" size="30">
|
||||
@@ -1531,7 +1573,7 @@
|
||||
<p class="help-block">Username for Plex.tv authentication.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="pms_password">PMS Password</label>
|
||||
<label for="pms_password">Plex.tv Password</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="password" class="form-control" id="pms_password" name="pms_password" size="30">
|
||||
@@ -1559,7 +1601,7 @@
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i
|
||||
class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Notification String Substitutions</h4>
|
||||
<h4 class="modal-title">Notification Parameters</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div>
|
||||
@@ -1569,7 +1611,7 @@
|
||||
<table class="notification-params">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="2">
|
||||
Global
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1604,7 +1646,7 @@
|
||||
<table class="notification-params">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="2">
|
||||
Stream Details
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1614,6 +1656,10 @@
|
||||
<td><strong>{streams}</strong></td>
|
||||
<td>The number of concurrent streams.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user_streams}</strong></td>
|
||||
<td>The number of concurrent streams by the person streaming.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{user}</strong></td>
|
||||
<td>The friendly name of the person streaming.</td>
|
||||
@@ -1759,7 +1805,7 @@
|
||||
<table class="notification-params">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="2">
|
||||
Metadata Details
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1825,6 +1871,26 @@
|
||||
<td><strong>{year}</strong></td>
|
||||
<td>The release year for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{release_date}</strong></td>
|
||||
<td>The release date (in date format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{air_date}</strong></td>
|
||||
<td>The air date (in date format) for the item.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{added_date}</strong></td>
|
||||
<td>The date (in date format) the item was added to Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{updated_date}</strong></td>
|
||||
<td>The date (in date format) the item was updated on Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{last_viewed_date}</strong></td>
|
||||
<td>The date (in date format) the item was last viewed on Plex.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{studio}</strong></td>
|
||||
<td>The studio for the item.</td>
|
||||
@@ -1876,12 +1942,12 @@
|
||||
<tr>
|
||||
<td><strong>{imdb_id}</strong></td>
|
||||
<td>The IMDB ID for the movie. <span class="small-muted">(e.g. tt2488496)</span>
|
||||
<p class="small-muted">(PMS agent must be Freebase)</p></td>
|
||||
<p class="small-muted">(PMS agent must be Plex Movie)</p></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{imdb_url}</strong></td>
|
||||
<td>The IMDB URL for the movie.
|
||||
<p class="small-muted">(PMS agent must be Freebase)</p></td>
|
||||
<p class="small-muted">(PMS agent must be Plex Movie)</p></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{thetvdb_id}</strong></td>
|
||||
@@ -1933,7 +1999,7 @@
|
||||
<table class="notification-params">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>
|
||||
<th colspan="2">
|
||||
Plex Update Available
|
||||
</th>
|
||||
</tr>
|
||||
@@ -1948,8 +2014,40 @@
|
||||
<td>The available update download URL.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog}</strong></td>
|
||||
<td>The changelog for the available update.</td>
|
||||
<td><strong>{update_release_date}</strong></td>
|
||||
<td>The release date of the update version.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_channel}</strong></td>
|
||||
<td>The update channel. <span class="small-muted">(Public or Plex Pass)</span></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_platform}</strong></td>
|
||||
<td>The platform of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro}</strong></td>
|
||||
<td>The distro of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_distro_build}</strong></td>
|
||||
<td>The distro build of your Plex Server.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_requirements}</strong></td>
|
||||
<td>The requirements for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_extra_info}</strong></td>
|
||||
<td>Any extra info for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_added}</strong></td>
|
||||
<td>The added changelog for the available update.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>{update_changelog_fixed}</strong></td>
|
||||
<td>The fixed changelog for the available update.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -1968,27 +2066,27 @@
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div>
|
||||
<div class="wellheader">
|
||||
<h4>Movie Tag <strong><movie></movie></strong></h4>
|
||||
</div>
|
||||
<div>
|
||||
<p class="help-block">All text inside a <strong>movie</strong> tag will only be sent when the media item being played back is a movie.</p>
|
||||
<h4>Movie Tag</h4>
|
||||
</div>
|
||||
<div style="padding-bottom: 10px;">
|
||||
<p class="help-block">All text inside <span class="inline-pre"><movie></movie></span> tags will only be sent when the media item is a movie.</p>
|
||||
<p><strong style="color: #fff;">Example:</strong></p>
|
||||
<pre>{user} has started playing {title} <movie>({year})</movie></pre>
|
||||
</div>
|
||||
<div class="wellheader">
|
||||
<h4>TV Tag <strong><tv></tv></strong></h4>
|
||||
</div>
|
||||
<div>
|
||||
<p class="help-block">All text inside a <strong>tv</strong> tag will only be sent when the media item being played back is an episode.</p>
|
||||
<h4>TV Tag</h4>
|
||||
</div>
|
||||
<div style="padding-bottom: 10px;">
|
||||
<p class="help-block">All text inside <span class="inline-pre"><tv></tv></span> tags will only be sent when the media item is an episode.</p>
|
||||
<p><strong style="color: #fff;">Example:</strong></p>
|
||||
<pre>{user} has started playing {title} <tv>(S{season_num}E{episode_num})</tv></pre>
|
||||
</div>
|
||||
<div class="wellheader">
|
||||
<h4>Music Tag <strong><music></music></strong></h4>
|
||||
<div>
|
||||
<h4>Music Tag</h4>
|
||||
</div>
|
||||
<div>
|
||||
<p class="help-block">All text inside a <strong>music</strong> tag will only be sent when the media item being played back is a music track.</p>
|
||||
<p class="help-block">All text inside <span class="inline-pre"><music></music></span> tags will only be sent when the media item is a track.</p>
|
||||
<p><strong style="color: #fff;">Example:</strong></p>
|
||||
<pre>{user} has started playing {title} <music>(Track {track_num})</music></pre>
|
||||
</div>
|
||||
@@ -2030,26 +2128,6 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div id="guidelines-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="guidelines-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Guidelines</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
|
||||
<strong>Please read the <a href="#" target="_blank" id="guidelines-link">guidelines</a> in the README document <br />before submitting a new <span id="guidelines-type"></span>!</strong>
|
||||
<br /><br />
|
||||
Your post may be removed for failure to follow the guidelines.
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<a href="#" target="_blank" id="guidelines-continue" class="btn btn-bright">Continue</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</%def>
|
||||
|
||||
@@ -2058,6 +2136,38 @@
|
||||
<script src="${http_root}js/Sortable.min.js"></script>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script>
|
||||
function getConfigurationTable() {
|
||||
$.ajax({
|
||||
url: 'get_configuration_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-configuration-table").html(xhr.responseText);
|
||||
if ("${kwargs.get('install_geoip')}" == 'true') {
|
||||
$('#install_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
} else if ("${kwargs.get('reinstall_geoip')}" == 'true') {
|
||||
$('#reinstall_geoip_db').removeClass('no-highlight').css('color','#e9a049');
|
||||
}
|
||||
if ("${kwargs.get('support')}" == 'true') {
|
||||
$('.support-modal-link').removeClass('no-highlight').css('color','#e9a049');
|
||||
$('#best-support-link').prepend('<span data-toggle="tooltip" title="Most Active"><i class="fa fa-star"></i></span> ')
|
||||
$('#best-support-link span').tooltip({ container: 'body' });
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
// Javascript to enable link to tab
|
||||
@@ -2094,7 +2204,9 @@ $(document).ready(function() {
|
||||
$('#restart-modal').modal('show');
|
||||
}
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0)
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
loadUpdateDistros();
|
||||
settingsChanged = false;
|
||||
}
|
||||
|
||||
@@ -2105,7 +2217,7 @@ $(document).ready(function() {
|
||||
|
||||
function saveSettings() {
|
||||
if (configForm.parsley().validate()) {
|
||||
doAjaxCall('configUpdate', $(this), 'tabs', true, postSaveChecks);
|
||||
doAjaxCall('configUpdate', $(this), 'tabs', true, true, postSaveChecks);
|
||||
return false;
|
||||
} else {
|
||||
showMsg('<i class="fa fa-exclamation-circle"></i> Please verify your settings.', false, true, 5000, true)
|
||||
@@ -2125,7 +2237,8 @@ $(document).ready(function() {
|
||||
initConfigCheckbox('#https_create_cert');
|
||||
initConfigCheckbox('#check_github');
|
||||
initConfigCheckbox('#notify_upload_posters');
|
||||
|
||||
initConfigCheckbox('#monitor_pms_updates');
|
||||
|
||||
$("#menu_link_shutdown").click(function() {
|
||||
$("#confirm-message").text("Are you sure you want to shutdown PlexPy?");
|
||||
$('#confirm-modal').modal();
|
||||
@@ -2154,38 +2267,9 @@ $(document).ready(function() {
|
||||
window.location.href = "restart";
|
||||
});
|
||||
|
||||
function getSchedulerTable() {
|
||||
$.ajax({
|
||||
url: 'get_scheduler_table',
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
$("#plexpy-scheduler-table").html(xhr.responseText);
|
||||
}
|
||||
});
|
||||
}
|
||||
getConfigurationTable();
|
||||
getSchedulerTable();
|
||||
|
||||
function confirmAjaxCall (url, msg) {
|
||||
$("#confirm-message").text(msg);
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: 'POST',
|
||||
complete: function (xhr, status) {
|
||||
result = $.parseJSON(xhr.responseText);
|
||||
msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#backup_config").click(function () {
|
||||
var msg = 'Are you sure you want to create a backup of the PlexPy config?';
|
||||
var url = 'backup_config';
|
||||
@@ -2216,6 +2300,11 @@ $(document).ready(function() {
|
||||
confirmAjaxCall(url, msg);
|
||||
});
|
||||
|
||||
$("#delete_temp_sessions").click(function () {
|
||||
var msg = 'Are you sure you want to flush the temporary sessions?<br /><strong>This will reset all currently active sessions.</strong>';
|
||||
var url = 'delete_temp_sessions';
|
||||
confirmAjaxCall(url, msg);
|
||||
});
|
||||
|
||||
$('#api_key').click(function(){ $('#api_key').select() });
|
||||
$("#generate_api").click(function() {
|
||||
@@ -2312,22 +2401,26 @@ $(document).ready(function() {
|
||||
if ((pms_username !== '') && (pms_password !== '')) {
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: 'get_pms_token',
|
||||
url: 'get_plexpy_pms_token',
|
||||
data: {
|
||||
username: pms_username,
|
||||
password: pms_password
|
||||
password: pms_password,
|
||||
force: true
|
||||
},
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
var authToken = $.parseJSON(xhr.responseText);
|
||||
if (authToken) {
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> Authentication successful!');
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
var authToken = result.token;
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> ' + msg);
|
||||
$("#pms_token").val(authToken);
|
||||
$('#pms-auth-modal').modal('hide');
|
||||
} else {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Invalid username or password.');
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> ' + msg);
|
||||
}
|
||||
loadUpdateDistros();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
@@ -2386,33 +2479,26 @@ $(document).ready(function() {
|
||||
pms_logs = false;
|
||||
|
||||
// Check to see if the PMS server version is >= 0.9.14, which automatically logs IP addresses
|
||||
$.ajax({
|
||||
url: 'get_server_identity',
|
||||
async: true,
|
||||
success: function(data) {
|
||||
if (data.version){ $("#pms_version").text(data.version); }
|
||||
var version = (data.version ? data.version.split('.') : null);
|
||||
if (version && parseInt(version[0]) >= 0 && parseInt(version[1]) >= 9 && parseInt(version[2]) >= 14) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
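// Encode the version for numeric comparison: zero-pad each of the first four
// dot-separated components to 4 digits and concatenate, so e.g. "0.9.14.3-abc123"
// becomes the integer 900140003.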
var version = parseInt($.map("${config['pms_version']}".split('-')[0].split('.').slice(0,4), function(v) { return ('0000'+v).substring(v.length); }).join(""));
|
||||
if (version > 900140000) {
|
||||
$("#debugLogCheck").html("IP address is automatically logged for PMS version 0.9.14 and above.");
|
||||
$("#ip_logging_enable").attr("disabled", true);
|
||||
$("#ip_logging_enable").attr("checked", true);
|
||||
pms_version = true;
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
} else {
|
||||
// Check to see if debug logs are enabled on the PMS.
|
||||
$.ajax({
|
||||
url: 'get_server_pref',
|
||||
data: { pref: 'logDebug' },
|
||||
async: true,
|
||||
success: function(data) {
|
||||
pms_logs_debug = (data == 'true' ? true : false);
|
||||
// Check to see if our logs folder is set before allowing IP logging to be enabled.
|
||||
checkLogsPath();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
$("#pms_logs_folder").change(function() {
|
||||
checkLogsPath();
|
||||
@@ -2566,16 +2652,6 @@ $(document).ready(function() {
|
||||
$('#notify_recently_added_grandparent_note').css('color', c);
|
||||
});
|
||||
|
||||
$('.guidelines-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
|
||||
$('#guidelines-type').text($(this).data('id'))
|
||||
$('#guidelines-modal').modal();
|
||||
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#guidelines-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
|
||||
function allowGuestAccessCheck () {
|
||||
if ($("#http_basic_auth").is(":checked")) {
|
||||
$("#allow_guest_access").attr("disabled", true);
|
||||
@@ -2622,6 +2698,56 @@ $(document).ready(function() {
|
||||
$("#http_hashed_password").val($("#http_hash_password").is(":checked") ? 1 : 0);
|
||||
$("#http_hash_password_error").html("");
|
||||
});
|
||||
|
||||
// Load PMS downloads
|
||||
function loadUpdateDistros() {
|
||||
var update_params_ajax = $.getJSON('get_server_update_params', function (data) { return data; });
|
||||
|
||||
$.when(update_params_ajax).done(function() {
|
||||
var update_params = update_params_ajax.responseJSON;
|
||||
|
||||
var plexpass = update_params.plexpass;
|
||||
var platform = update_params.pms_platform;
|
||||
var update_channel = update_params.pms_update_channel;
|
||||
var update_distro = update_params.pms_update_distro;
|
||||
var update_distro_build = update_params.pms_update_distro_build;
|
||||
|
||||
$("#pms_update_channel option[value='plexpass']").remove();
|
||||
if (plexpass) {
|
||||
var selected = (update_channel == 'plexpass') ? true : false;
|
||||
$('#pms_update_channel')
|
||||
.append($('<option></option>')
|
||||
.text('Plex Pass')
|
||||
.val('plexpass')
|
||||
.prop('selected', selected));
|
||||
}
|
||||
|
||||
$.getJSON('https://plex.tv/api/downloads/1.json?channel=' + update_channel, function (downloads) {
|
||||
platform_downloads = downloads.computer[platform] || downloads.nas[platform];
|
||||
if (platform_downloads) {
|
||||
$("#pms_update_distro_build option").remove();
|
||||
$.each(platform_downloads.releases, function (index, item) {
|
||||
var label = (platform_downloads.releases.length == 1) ? platform_downloads.name : platform_downloads.name + ' - ' + item.label;
|
||||
var selected = (item.distro == update_distro && item.build == update_distro_build) ? true : false;
|
||||
$('#pms_update_distro_build')
|
||||
.append($('<option></option>')
|
||||
.text(label)
|
||||
.val(item.build)
|
||||
.attr('data-distro', item.distro)
|
||||
.prop('selected', selected));
|
||||
})
|
||||
$('#pms_update_distro').val($("#pms_update_distro_build option:selected").data('distro'))
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
loadUpdateDistros();
|
||||
|
||||
|
||||
$('#pms_update_distro_build').change(function () {
|
||||
var distro = $("option:selected", this).data('distro')
|
||||
$('#pms_update_distro').val(distro)
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</%def>
|
||||
|
@@ -53,7 +53,9 @@ DOCUMENTATION :: END
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
@@ -95,7 +97,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
|
@@ -383,7 +383,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_user_watch_time_stats',
|
||||
url: 'user_watch_time_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
@@ -393,7 +393,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate platform stats
|
||||
$.ajax({
|
||||
url: 'get_user_player_stats',
|
||||
url: 'user_player_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -418,7 +418,7 @@
|
||||
if ((pms_username !== '') && (pms_password !== '')) {
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: 'get_pms_token',
|
||||
url: 'get_plexpy_pms_token',
|
||||
data: {
|
||||
username: pms_username,
|
||||
password: pms_password
|
||||
@@ -426,15 +426,17 @@
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
var authToken = $.parseJSON(xhr.responseText);
|
||||
if (authToken) {
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> Authentation successful!');
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
var authToken = result.token;
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> ' + msg);
|
||||
$('#pms-token-status').fadeIn('fast');
|
||||
$("#pms_token").val(authToken);
|
||||
authenticated = true;
|
||||
getServerOptions(authToken)
|
||||
} else {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Invalid username or password.');
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> ' + msg);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@@ -14,7 +14,7 @@
|
||||
# default. Do not set it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
@@ -30,7 +30,7 @@ load_rc_config ${name}
|
||||
|
||||
: ${plexpy_enable:="NO"}
|
||||
: ${plexpy_user:="_sabnzbd"}
|
||||
: ${plexpy_dir:="/usr/local/plexpy"}
|
||||
: ${plexpy_dir:="/usr/local/share/plexpy"}
|
||||
: ${plexpy_chdir:="${plexpy_dir}"}
|
||||
: ${plexpy_pid:="${plexpy_dir}/plexpy.pid"}
|
||||
: ${plexpy_flags:=""}
|
||||
@@ -49,23 +49,33 @@ fi
|
||||
|
||||
verify_plexpy_pid() {
|
||||
# Make sure the pid corresponds to the PlexPy process.
|
||||
pid=`cat ${plexpy_pid} 2>/dev/null`
|
||||
ps -p ${pid} | grep -q "python ${plexpy_dir}/PlexPy.py"
|
||||
return $?
|
||||
if [ -f ${plexpy_pid} ]; then
|
||||
pid=`cat ${plexpy_pid} 2>/dev/null`
|
||||
ps -p ${pid} | grep -q "python2 ${plexpy_dir}/PlexPy.py"
|
||||
return $?
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Try to stop PlexPy cleanly by calling shutdown over http.
|
||||
# Try to stop PlexPy cleanly by sending SIGTERM
|
||||
plexpy_stop() {
|
||||
echo "Stopping $name"
|
||||
verify_plexpy_pid
|
||||
if [ -n "${pid}" ]; then
|
||||
kill ${pid}
|
||||
wait_for_pids ${pid}
|
||||
echo "Stopped"
|
||||
echo "Stopped."
|
||||
fi
|
||||
}
|
||||
|
||||
plexpy_status() {
|
||||
verify_plexpy_pid && echo "$name is running as ${pid}" || echo "$name is not running"
|
||||
verify_plexpy_pid
|
||||
if [ -n "${pid}" ]; then
|
||||
echo "$name is running as ${pid}."
|
||||
else
|
||||
echo "$name is not running."
|
||||
fi
|
||||
}
|
||||
|
||||
run_rc_command "$1"
|
||||
|
@@ -14,7 +14,7 @@
|
||||
# default. Do not set it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
@@ -58,7 +58,7 @@ verify_plexpy_pid() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Try to stop PlexPy cleanly by calling shutdown over http.
|
||||
# Try to stop PlexPy cleanly by sending SIGTERM
|
||||
plexpy_stop() {
|
||||
echo "Stopping $name."
|
||||
verify_plexpy_pid
|
||||
|
lib/dns/__init__.py (new file, 54 additions)
@@ -0,0 +1,54 @@
|
||||
# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""dnspython DNS toolkit"""
|
||||
|
||||
__all__ = [
|
||||
'dnssec',
|
||||
'e164',
|
||||
'edns',
|
||||
'entropy',
|
||||
'exception',
|
||||
'flags',
|
||||
'hash',
|
||||
'inet',
|
||||
'ipv4',
|
||||
'ipv6',
|
||||
'message',
|
||||
'name',
|
||||
'namedict',
|
||||
'node',
|
||||
'opcode',
|
||||
'query',
|
||||
'rcode',
|
||||
'rdata',
|
||||
'rdataclass',
|
||||
'rdataset',
|
||||
'rdatatype',
|
||||
'renderer',
|
||||
'resolver',
|
||||
'reversename',
|
||||
'rrset',
|
||||
'set',
|
||||
'tokenizer',
|
||||
'tsig',
|
||||
'tsigkeyring',
|
||||
'ttl',
|
||||
'rdtypes',
|
||||
'update',
|
||||
'version',
|
||||
'wiredata',
|
||||
'zone',
|
||||
]
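The package init only declares __all__; a quick, illustrative sketch of pulling in one of the submodules listed above (the domain name is an arbitrary example):

import dns.name

# Submodules are imported explicitly, e.g. the name-handling module listed in __all__.
n = dns.name.from_text('example.com')
assert n.to_text() == 'example.com.'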
|
lib/dns/_compat.py (new file, 21 additions)
@@ -0,0 +1,21 @@
|
||||
import sys
|
||||
|
||||
|
||||
if sys.version_info > (3,):
|
||||
long = int
|
||||
xrange = range
|
||||
else:
|
||||
long = long
|
||||
xrange = xrange
|
||||
|
||||
# unicode / binary types
|
||||
if sys.version_info > (3,):
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
string_types = (str,)
|
||||
unichr = chr
|
||||
else:
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
string_types = (basestring,)
|
||||
unichr = unichr
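A minimal sketch of how these aliases behave on either interpreter (the literal values are illustrative):

from dns._compat import text_type, binary_type, string_types

# Resolve to unicode/str/basestring on Python 2 and to str/bytes/str on Python 3.
assert isinstance(u'plexpy', text_type)
assert isinstance(b'plexpy', binary_type)
assert isinstance('plexpy', string_types)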
|
lib/dns/dnssec.py (new file, 457 additions)
@@ -0,0 +1,457 @@
|
||||
# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Common DNSSEC-related functions and constants."""
|
||||
|
||||
from io import BytesIO
|
||||
import struct
|
||||
import time
|
||||
|
||||
import dns.exception
|
||||
import dns.hash
|
||||
import dns.name
|
||||
import dns.node
|
||||
import dns.rdataset
|
||||
import dns.rdata
|
||||
import dns.rdatatype
|
||||
import dns.rdataclass
|
||||
from ._compat import string_types
|
||||
|
||||
|
||||
class UnsupportedAlgorithm(dns.exception.DNSException):
|
||||
|
||||
"""The DNSSEC algorithm is not supported."""
|
||||
|
||||
|
||||
class ValidationFailure(dns.exception.DNSException):
|
||||
|
||||
"""The DNSSEC signature is invalid."""
|
||||
|
||||
RSAMD5 = 1
|
||||
DH = 2
|
||||
DSA = 3
|
||||
ECC = 4
|
||||
RSASHA1 = 5
|
||||
DSANSEC3SHA1 = 6
|
||||
RSASHA1NSEC3SHA1 = 7
|
||||
RSASHA256 = 8
|
||||
RSASHA512 = 10
|
||||
ECDSAP256SHA256 = 13
|
||||
ECDSAP384SHA384 = 14
|
||||
INDIRECT = 252
|
||||
PRIVATEDNS = 253
|
||||
PRIVATEOID = 254
|
||||
|
||||
_algorithm_by_text = {
|
||||
'RSAMD5': RSAMD5,
|
||||
'DH': DH,
|
||||
'DSA': DSA,
|
||||
'ECC': ECC,
|
||||
'RSASHA1': RSASHA1,
|
||||
'DSANSEC3SHA1': DSANSEC3SHA1,
|
||||
'RSASHA1NSEC3SHA1': RSASHA1NSEC3SHA1,
|
||||
'RSASHA256': RSASHA256,
|
||||
'RSASHA512': RSASHA512,
|
||||
'INDIRECT': INDIRECT,
|
||||
'ECDSAP256SHA256': ECDSAP256SHA256,
|
||||
'ECDSAP384SHA384': ECDSAP384SHA384,
|
||||
'PRIVATEDNS': PRIVATEDNS,
|
||||
'PRIVATEOID': PRIVATEOID,
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_algorithm_by_value = dict((y, x) for x, y in _algorithm_by_text.items())
|
||||
|
||||
|
||||
def algorithm_from_text(text):
|
||||
"""Convert text into a DNSSEC algorithm value
|
||||
@rtype: int"""
|
||||
|
||||
value = _algorithm_by_text.get(text.upper())
|
||||
if value is None:
|
||||
value = int(text)
|
||||
return value
|
||||
|
||||
|
||||
def algorithm_to_text(value):
|
||||
"""Convert a DNSSEC algorithm value to text
|
||||
@rtype: string"""
|
||||
|
||||
text = _algorithm_by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
|
||||
|
||||
|
||||
def _to_rdata(record, origin):
|
||||
s = BytesIO()
|
||||
record.to_wire(s, origin=origin)
|
||||
return s.getvalue()
|
||||
|
||||
|
||||
def key_id(key, origin=None):
|
||||
rdata = _to_rdata(key, origin)
|
||||
rdata = bytearray(rdata)
|
||||
if key.algorithm == RSAMD5:
|
||||
return (rdata[-3] << 8) + rdata[-2]
|
||||
else:
|
||||
total = 0
|
||||
for i in range(len(rdata) // 2):
|
||||
total += (rdata[2 * i] << 8) + \
|
||||
rdata[2 * i + 1]
|
||||
if len(rdata) % 2 != 0:
|
||||
total += rdata[len(rdata) - 1] << 8
|
||||
total += ((total >> 16) & 0xffff)
|
||||
return total & 0xffff
|
||||
|
||||
|
||||
def make_ds(name, key, algorithm, origin=None):
|
||||
if algorithm.upper() == 'SHA1':
|
||||
dsalg = 1
|
||||
hash = dns.hash.hashes['SHA1']()
|
||||
elif algorithm.upper() == 'SHA256':
|
||||
dsalg = 2
|
||||
hash = dns.hash.hashes['SHA256']()
|
||||
else:
|
||||
raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm)
|
||||
|
||||
if isinstance(name, string_types):
|
||||
name = dns.name.from_text(name, origin)
|
||||
hash.update(name.canonicalize().to_wire())
|
||||
hash.update(_to_rdata(key, origin))
|
||||
digest = hash.digest()
|
||||
|
||||
dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest
|
||||
return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0,
|
||||
len(dsrdata))
|
||||
|
||||
|
||||
def _find_candidate_keys(keys, rrsig):
|
||||
candidate_keys = []
|
||||
value = keys.get(rrsig.signer)
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, dns.node.Node):
|
||||
try:
|
||||
rdataset = value.find_rdataset(dns.rdataclass.IN,
|
||||
dns.rdatatype.DNSKEY)
|
||||
except KeyError:
|
||||
return None
|
||||
else:
|
||||
rdataset = value
|
||||
for rdata in rdataset:
|
||||
if rdata.algorithm == rrsig.algorithm and \
|
||||
key_id(rdata) == rrsig.key_tag:
|
||||
candidate_keys.append(rdata)
|
||||
return candidate_keys
|
||||
|
||||
|
||||
def _is_rsa(algorithm):
|
||||
return algorithm in (RSAMD5, RSASHA1,
|
||||
RSASHA1NSEC3SHA1, RSASHA256,
|
||||
RSASHA512)
|
||||
|
||||
|
||||
def _is_dsa(algorithm):
|
||||
return algorithm in (DSA, DSANSEC3SHA1)
|
||||
|
||||
|
||||
def _is_ecdsa(algorithm):
|
||||
return _have_ecdsa and (algorithm in (ECDSAP256SHA256, ECDSAP384SHA384))
|
||||
|
||||
|
||||
def _is_md5(algorithm):
|
||||
return algorithm == RSAMD5
|
||||
|
||||
|
||||
def _is_sha1(algorithm):
|
||||
return algorithm in (DSA, RSASHA1,
|
||||
DSANSEC3SHA1, RSASHA1NSEC3SHA1)
|
||||
|
||||
|
||||
def _is_sha256(algorithm):
|
||||
return algorithm in (RSASHA256, ECDSAP256SHA256)
|
||||
|
||||
|
||||
def _is_sha384(algorithm):
|
||||
return algorithm == ECDSAP384SHA384
|
||||
|
||||
|
||||
def _is_sha512(algorithm):
|
||||
return algorithm == RSASHA512
|
||||
|
||||
|
||||
def _make_hash(algorithm):
|
||||
if _is_md5(algorithm):
|
||||
return dns.hash.hashes['MD5']()
|
||||
if _is_sha1(algorithm):
|
||||
return dns.hash.hashes['SHA1']()
|
||||
if _is_sha256(algorithm):
|
||||
return dns.hash.hashes['SHA256']()
|
||||
if _is_sha384(algorithm):
|
||||
return dns.hash.hashes['SHA384']()
|
||||
if _is_sha512(algorithm):
|
||||
return dns.hash.hashes['SHA512']()
|
||||
raise ValidationFailure('unknown hash for algorithm %u' % algorithm)
|
||||
|
||||
|
||||
def _make_algorithm_id(algorithm):
|
||||
if _is_md5(algorithm):
|
||||
oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05]
|
||||
elif _is_sha1(algorithm):
|
||||
oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a]
|
||||
elif _is_sha256(algorithm):
|
||||
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01]
|
||||
elif _is_sha512(algorithm):
|
||||
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03]
|
||||
else:
|
||||
raise ValidationFailure('unknown algorithm %u' % algorithm)
|
||||
olen = len(oid)
|
||||
dlen = _make_hash(algorithm).digest_size
|
||||
idbytes = [0x30] + [8 + olen + dlen] + \
|
||||
[0x30, olen + 4] + [0x06, olen] + oid + \
|
||||
[0x05, 0x00] + [0x04, dlen]
|
||||
return struct.pack('!%dB' % len(idbytes), *idbytes)
|
||||
|
||||
|
||||
def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
|
||||
"""Validate an RRset against a single signature rdata
|
||||
|
||||
The owner name of the rrsig is assumed to be the same as the owner name
|
||||
of the rrset.
|
||||
|
||||
@param rrset: The RRset to validate
|
||||
@type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param rrsig: The signature rdata
|
||||
@type rrsig: dns.rrset.Rdata
|
||||
@param keys: The key dictionary.
|
||||
@type keys: a dictionary keyed by dns.name.Name with node or rdataset
|
||||
values
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name or None
|
||||
@param now: The time to use when validating the signatures. The default
|
||||
is the current time.
|
||||
@type now: int
|
||||
"""
|
||||
|
||||
if isinstance(origin, string_types):
|
||||
origin = dns.name.from_text(origin, dns.name.root)
|
||||
|
||||
for candidate_key in _find_candidate_keys(keys, rrsig):
|
||||
if not candidate_key:
|
||||
raise ValidationFailure('unknown key')
|
||||
|
||||
# For convenience, allow the rrset to be specified as a (name,
|
||||
# rdataset) tuple as well as a proper rrset
|
||||
if isinstance(rrset, tuple):
|
||||
rrname = rrset[0]
|
||||
rdataset = rrset[1]
|
||||
else:
|
||||
rrname = rrset.name
|
||||
rdataset = rrset
|
||||
|
||||
if now is None:
|
||||
now = time.time()
|
||||
if rrsig.expiration < now:
|
||||
raise ValidationFailure('expired')
|
||||
if rrsig.inception > now:
|
||||
raise ValidationFailure('not yet valid')
|
||||
|
||||
hash = _make_hash(rrsig.algorithm)
|
||||
|
||||
if _is_rsa(rrsig.algorithm):
|
||||
keyptr = candidate_key.key
|
||||
(bytes_,) = struct.unpack('!B', keyptr[0:1])
|
||||
keyptr = keyptr[1:]
|
||||
if bytes_ == 0:
|
||||
(bytes_,) = struct.unpack('!H', keyptr[0:2])
|
||||
keyptr = keyptr[2:]
|
||||
rsa_e = keyptr[0:bytes_]
|
||||
rsa_n = keyptr[bytes_:]
|
||||
keylen = len(rsa_n) * 8
|
||||
pubkey = Crypto.PublicKey.RSA.construct(
|
||||
(Crypto.Util.number.bytes_to_long(rsa_n),
|
||||
Crypto.Util.number.bytes_to_long(rsa_e)))
|
||||
sig = (Crypto.Util.number.bytes_to_long(rrsig.signature),)
|
||||
elif _is_dsa(rrsig.algorithm):
|
||||
keyptr = candidate_key.key
|
||||
(t,) = struct.unpack('!B', keyptr[0:1])
|
||||
keyptr = keyptr[1:]
|
||||
octets = 64 + t * 8
|
||||
dsa_q = keyptr[0:20]
|
||||
keyptr = keyptr[20:]
|
||||
dsa_p = keyptr[0:octets]
|
||||
keyptr = keyptr[octets:]
|
||||
dsa_g = keyptr[0:octets]
|
||||
keyptr = keyptr[octets:]
|
||||
dsa_y = keyptr[0:octets]
|
||||
pubkey = Crypto.PublicKey.DSA.construct(
|
||||
(Crypto.Util.number.bytes_to_long(dsa_y),
|
||||
Crypto.Util.number.bytes_to_long(dsa_g),
|
||||
Crypto.Util.number.bytes_to_long(dsa_p),
|
||||
Crypto.Util.number.bytes_to_long(dsa_q)))
|
||||
(dsa_r, dsa_s) = struct.unpack('!20s20s', rrsig.signature[1:])
|
||||
sig = (Crypto.Util.number.bytes_to_long(dsa_r),
|
||||
Crypto.Util.number.bytes_to_long(dsa_s))
|
||||
elif _is_ecdsa(rrsig.algorithm):
|
||||
if rrsig.algorithm == ECDSAP256SHA256:
|
||||
curve = ecdsa.curves.NIST256p
|
||||
key_len = 32
|
||||
elif rrsig.algorithm == ECDSAP384SHA384:
|
||||
curve = ecdsa.curves.NIST384p
|
||||
key_len = 48
|
||||
else:
|
||||
# shouldn't happen
|
||||
raise ValidationFailure('unknown ECDSA curve')
|
||||
keyptr = candidate_key.key
|
||||
x = Crypto.Util.number.bytes_to_long(keyptr[0:key_len])
|
||||
y = Crypto.Util.number.bytes_to_long(keyptr[key_len:key_len * 2])
|
||||
assert ecdsa.ecdsa.point_is_valid(curve.generator, x, y)
|
||||
point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order)
|
||||
verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point,
|
||||
curve)
|
||||
pubkey = ECKeyWrapper(verifying_key, key_len)
|
||||
r = rrsig.signature[:key_len]
|
||||
s = rrsig.signature[key_len:]
|
||||
sig = ecdsa.ecdsa.Signature(Crypto.Util.number.bytes_to_long(r),
|
||||
Crypto.Util.number.bytes_to_long(s))
|
||||
else:
|
||||
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
|
||||
|
||||
hash.update(_to_rdata(rrsig, origin)[:18])
|
||||
hash.update(rrsig.signer.to_digestable(origin))
|
||||
|
||||
if rrsig.labels < len(rrname) - 1:
|
||||
suffix = rrname.split(rrsig.labels + 1)[1]
|
||||
rrname = dns.name.from_text('*', suffix)
|
||||
rrnamebuf = rrname.to_digestable(origin)
|
||||
rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass,
|
||||
rrsig.original_ttl)
|
||||
rrlist = sorted(rdataset)
|
||||
for rr in rrlist:
|
||||
hash.update(rrnamebuf)
|
||||
hash.update(rrfixed)
|
||||
rrdata = rr.to_digestable(origin)
|
||||
rrlen = struct.pack('!H', len(rrdata))
|
||||
hash.update(rrlen)
|
||||
hash.update(rrdata)
|
||||
|
||||
digest = hash.digest()
|
||||
|
||||
if _is_rsa(rrsig.algorithm):
|
||||
# PKCS1 algorithm identifier goop
|
||||
digest = _make_algorithm_id(rrsig.algorithm) + digest
|
||||
padlen = keylen // 8 - len(digest) - 3
|
||||
digest = struct.pack('!%dB' % (2 + padlen + 1),
|
||||
*([0, 1] + [0xFF] * padlen + [0])) + digest
|
||||
elif _is_dsa(rrsig.algorithm) or _is_ecdsa(rrsig.algorithm):
|
||||
pass
|
||||
else:
|
||||
# Raise here for code clarity; this won't actually ever happen
|
||||
# since if the algorithm is really unknown we'd already have
|
||||
# raised an exception above
|
||||
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
|
||||
|
||||
if pubkey.verify(digest, sig):
|
||||
return
|
||||
raise ValidationFailure('verify failure')
|
||||
|
||||
|
||||
def _validate(rrset, rrsigset, keys, origin=None, now=None):
|
||||
"""Validate an RRset
|
||||
|
||||
@param rrset: The RRset to validate
|
||||
@type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param rrsigset: The signature RRset
|
||||
@type rrsigset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param keys: The key dictionary.
|
||||
@type keys: a dictionary keyed by dns.name.Name with node or rdataset
|
||||
values
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name or None
|
||||
@param now: The time to use when validating the signatures. The default
|
||||
is the current time.
|
||||
@type now: int
|
||||
"""
|
||||
|
||||
if isinstance(origin, string_types):
|
||||
origin = dns.name.from_text(origin, dns.name.root)
|
||||
|
||||
if isinstance(rrset, tuple):
|
||||
rrname = rrset[0]
|
||||
else:
|
||||
rrname = rrset.name
|
||||
|
||||
if isinstance(rrsigset, tuple):
|
||||
rrsigname = rrsigset[0]
|
||||
rrsigrdataset = rrsigset[1]
|
||||
else:
|
||||
rrsigname = rrsigset.name
|
||||
rrsigrdataset = rrsigset
|
||||
|
||||
rrname = rrname.choose_relativity(origin)
|
||||
rrsigname = rrname.choose_relativity(origin)
|
||||
if rrname != rrsigname:
|
||||
raise ValidationFailure("owner names do not match")
|
||||
|
||||
for rrsig in rrsigrdataset:
|
||||
try:
|
||||
_validate_rrsig(rrset, rrsig, keys, origin, now)
|
||||
return
|
||||
except ValidationFailure:
|
||||
pass
|
||||
raise ValidationFailure("no RRSIGs validated")
|
||||
|
||||
|
||||
def _need_pycrypto(*args, **kwargs):
|
||||
raise NotImplementedError("DNSSEC validation requires pycrypto")
|
||||
|
||||
try:
|
||||
import Crypto.PublicKey.RSA
|
||||
import Crypto.PublicKey.DSA
|
||||
import Crypto.Util.number
|
||||
validate = _validate
|
||||
validate_rrsig = _validate_rrsig
|
||||
_have_pycrypto = True
|
||||
except ImportError:
|
||||
validate = _need_pycrypto
|
||||
validate_rrsig = _need_pycrypto
|
||||
_have_pycrypto = False
|
||||
|
||||
try:
|
||||
import ecdsa
|
||||
import ecdsa.ecdsa
|
||||
import ecdsa.ellipticcurve
|
||||
import ecdsa.keys
|
||||
_have_ecdsa = True
|
||||
|
||||
class ECKeyWrapper(object):
|
||||
|
||||
def __init__(self, key, key_len):
|
||||
self.key = key
|
||||
self.key_len = key_len
|
||||
|
||||
def verify(self, digest, sig):
|
||||
diglong = Crypto.Util.number.bytes_to_long(digest)
|
||||
return self.key.pubkey.verifies(diglong, sig)
|
||||
|
||||
except ImportError:
|
||||
_have_ecdsa = False
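A minimal sketch exercising the algorithm constants and text helpers defined above (full RRSIG validation additionally requires pycrypto, as the fallback at the end makes clear):

import dns.dnssec

# Round-trip between mnemonic and numeric DNSSEC algorithm identifiers.
assert dns.dnssec.algorithm_from_text('RSASHA256') == dns.dnssec.RSASHA256 == 8
assert dns.dnssec.algorithm_to_text(dns.dnssec.RSASHA512) == 'RSASHA512'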
|
lib/dns/e164.py (new file, 84 additions)
@@ -0,0 +1,84 @@
|
||||
# Copyright (C) 2006, 2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS E.164 helpers
|
||||
|
||||
@var public_enum_domain: The DNS public ENUM domain, e164.arpa.
|
||||
@type public_enum_domain: dns.name.Name object
|
||||
"""
|
||||
|
||||
|
||||
import dns.exception
|
||||
import dns.name
|
||||
import dns.resolver
|
||||
from ._compat import string_types
|
||||
|
||||
public_enum_domain = dns.name.from_text('e164.arpa.')
|
||||
|
||||
|
||||
def from_e164(text, origin=public_enum_domain):
|
||||
"""Convert an E.164 number in textual form into a Name object whose
|
||||
value is the ENUM domain name for that number.
|
||||
@param text: an E.164 number in textual form.
|
||||
@type text: str
|
||||
@param origin: The domain in which the number should be constructed.
|
||||
The default is e164.arpa.
|
||||
@type origin: dns.name.Name object or None
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
parts = [d for d in text if d.isdigit()]
|
||||
parts.reverse()
|
||||
return dns.name.from_text('.'.join(parts), origin=origin)
|
||||
|
||||
|
||||
def to_e164(name, origin=public_enum_domain, want_plus_prefix=True):
|
||||
"""Convert an ENUM domain name into an E.164 number.
|
||||
@param name: the ENUM domain name.
|
||||
@type name: dns.name.Name object.
|
||||
@param origin: A domain containing the ENUM domain name. The
|
||||
name is relativized to this domain before being converted to text.
|
||||
@type origin: dns.name.Name object or None
|
||||
@param want_plus_prefix: if True, add a '+' to the beginning of the
|
||||
returned number.
|
||||
@rtype: str
|
||||
"""
|
||||
if origin is not None:
|
||||
name = name.relativize(origin)
|
||||
dlabels = [d for d in name.labels if (d.isdigit() and len(d) == 1)]
|
||||
if len(dlabels) != len(name.labels):
|
||||
raise dns.exception.SyntaxError('non-digit labels in ENUM domain name')
|
||||
dlabels.reverse()
|
||||
text = b''.join(dlabels)
|
||||
if want_plus_prefix:
|
||||
text = b'+' + text
|
||||
return text
|
||||
|
||||
|
||||
def query(number, domains, resolver=None):
|
||||
"""Look for NAPTR RRs for the specified number in the specified domains.
|
||||
|
||||
e.g. query('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
|
||||
"""
|
||||
if resolver is None:
|
||||
resolver = dns.resolver.get_default_resolver()
|
||||
for domain in domains:
|
||||
if isinstance(domain, string_types):
|
||||
domain = dns.name.from_text(domain)
|
||||
qname = dns.e164.from_e164(number, domain)
|
||||
try:
|
||||
return resolver.query(qname, 'NAPTR')
|
||||
except dns.resolver.NXDOMAIN:
|
||||
pass
|
||||
raise dns.resolver.NXDOMAIN
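A small usage sketch of the two converters above (the phone number is a made-up example):

import dns.e164

n = dns.e164.from_e164('+1 650 555 1212')
# n is the ENUM name 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
assert dns.e164.to_e164(n) == b'+16505551212'  # returned as bytes in this version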
|
lib/dns/edns.py (new file, 150 additions)
@@ -0,0 +1,150 @@
|
||||
# Copyright (C) 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""EDNS Options"""
|
||||
|
||||
NSID = 3
|
||||
|
||||
|
||||
class Option(object):
|
||||
|
||||
"""Base class for all EDNS option types.
|
||||
"""
|
||||
|
||||
def __init__(self, otype):
|
||||
"""Initialize an option.
|
||||
@param otype: The rdata type
|
||||
@type otype: int
|
||||
"""
|
||||
self.otype = otype
|
||||
|
||||
def to_wire(self, file):
|
||||
"""Convert an option to wire format.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, otype, wire, current, olen):
|
||||
"""Build an EDNS option object from wire format
|
||||
|
||||
@param otype: The option type
|
||||
@type otype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param olen: The length of the wire-format option data
|
||||
@type olen: int
|
||||
@rtype: dns.edns.Option instance"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _cmp(self, other):
|
||||
"""Compare an EDNS option with another option of the same type.
|
||||
Return < 0 if self < other, 0 if self == other,
|
||||
and > 0 if self > other.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Option):
|
||||
return False
|
||||
if self.otype != other.otype:
|
||||
return False
|
||||
return self._cmp(other) == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
if not isinstance(other, Option):
|
||||
return False
|
||||
if self.otype != other.otype:
|
||||
return False
|
||||
return self._cmp(other) != 0
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) < 0
|
||||
|
||||
def __le__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) <= 0
|
||||
|
||||
def __ge__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) >= 0
|
||||
|
||||
def __gt__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) > 0
|
||||
|
||||
|
||||
class GenericOption(Option):
|
||||
|
||||
"""Generate Rdata Class
|
||||
|
||||
This class is used for EDNS option types for which we have no better
|
||||
implementation.
|
||||
"""
|
||||
|
||||
def __init__(self, otype, data):
|
||||
super(GenericOption, self).__init__(otype)
|
||||
self.data = data
|
||||
|
||||
def to_wire(self, file):
|
||||
file.write(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, otype, wire, current, olen):
|
||||
return cls(otype, wire[current: current + olen])
|
||||
|
||||
def _cmp(self, other):
|
||||
if self.data == other.data:
|
||||
return 0
|
||||
if self.data > other.data:
|
||||
return 1
|
||||
return -1
|
||||
|
||||
_type_to_class = {
|
||||
}
|
||||
|
||||
|
||||
def get_option_class(otype):
|
||||
cls = _type_to_class.get(otype)
|
||||
if cls is None:
|
||||
cls = GenericOption
|
||||
return cls
|
||||
|
||||
|
||||
def option_from_wire(otype, wire, current, olen):
|
||||
"""Build an EDNS option object from wire format
|
||||
|
||||
@param otype: The option type
|
||||
@type otype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param olen: The length of the wire-format option data
|
||||
@type olen: int
|
||||
@rtype: dns.edns.Option instance"""
|
||||
|
||||
cls = get_option_class(otype)
|
||||
return cls.from_wire(otype, wire, current, olen)
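A minimal round-trip sketch using GenericOption, which is what option_from_wire falls back to for unregistered option codes (the empty NSID payload is illustrative):

import io
import dns.edns

opt = dns.edns.GenericOption(dns.edns.NSID, b'')
buf = io.BytesIO()
opt.to_wire(buf)
assert dns.edns.option_from_wire(dns.edns.NSID, buf.getvalue(), 0, 0) == opt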
|
lib/dns/entropy.py (new file, 127 additions)
@@ -0,0 +1,127 @@
|
||||
# Copyright (C) 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import os
|
||||
import time
|
||||
from ._compat import long, binary_type
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
|
||||
class EntropyPool(object):
|
||||
|
||||
def __init__(self, seed=None):
|
||||
self.pool_index = 0
|
||||
self.digest = None
|
||||
self.next_byte = 0
|
||||
self.lock = _threading.Lock()
|
||||
try:
|
||||
import hashlib
|
||||
self.hash = hashlib.sha1()
|
||||
self.hash_len = 20
|
||||
except:
|
||||
try:
|
||||
import sha
|
||||
self.hash = sha.new()
|
||||
self.hash_len = 20
|
||||
except:
|
||||
import md5
|
||||
self.hash = md5.new()
|
||||
self.hash_len = 16
|
||||
self.pool = bytearray(b'\0' * self.hash_len)
|
||||
if seed is not None:
|
||||
self.stir(bytearray(seed))
|
||||
self.seeded = True
|
||||
else:
|
||||
self.seeded = False
|
||||
|
||||
def stir(self, entropy, already_locked=False):
|
||||
if not already_locked:
|
||||
self.lock.acquire()
|
||||
try:
|
||||
for c in entropy:
|
||||
if self.pool_index == self.hash_len:
|
||||
self.pool_index = 0
|
||||
b = c & 0xff
|
||||
self.pool[self.pool_index] ^= b
|
||||
self.pool_index += 1
|
||||
finally:
|
||||
if not already_locked:
|
||||
self.lock.release()
|
||||
|
||||
def _maybe_seed(self):
|
||||
if not self.seeded:
|
||||
try:
|
||||
seed = os.urandom(16)
|
||||
except:
|
||||
try:
|
||||
r = open('/dev/urandom', 'rb', 0)
|
||||
try:
|
||||
seed = r.read(16)
|
||||
finally:
|
||||
r.close()
|
||||
except:
|
||||
seed = str(time.time())
|
||||
self.seeded = True
|
||||
seed = bytearray(seed)
|
||||
self.stir(seed, True)
|
||||
|
||||
def random_8(self):
|
||||
self.lock.acquire()
|
||||
try:
|
||||
self._maybe_seed()
|
||||
if self.digest is None or self.next_byte == self.hash_len:
|
||||
self.hash.update(binary_type(self.pool))
|
||||
self.digest = bytearray(self.hash.digest())
|
||||
self.stir(self.digest, True)
|
||||
self.next_byte = 0
|
||||
value = self.digest[self.next_byte]
|
||||
self.next_byte += 1
|
||||
finally:
|
||||
self.lock.release()
|
||||
return value
|
||||
|
||||
def random_16(self):
|
||||
return self.random_8() * 256 + self.random_8()
|
||||
|
||||
def random_32(self):
|
||||
return self.random_16() * 65536 + self.random_16()
|
||||
|
||||
def random_between(self, first, last):
|
||||
size = last - first + 1
|
||||
if size > long(4294967296):
|
||||
raise ValueError('too big')
|
||||
if size > 65536:
|
||||
rand = self.random_32
|
||||
max = long(4294967295)
|
||||
elif size > 256:
|
||||
rand = self.random_16
|
||||
max = 65535
|
||||
else:
|
||||
rand = self.random_8
|
||||
max = 255
|
||||
return (first + size * rand() // (max + 1))
|
||||
|
||||
pool = EntropyPool()
|
||||
|
||||
|
||||
def random_16():
|
||||
return pool.random_16()
|
||||
|
||||
|
||||
def between(first, last):
|
||||
return pool.random_between(first, last)
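A quick sketch of the module-level helpers above (the bounds follow from random_8/random_16):

import dns.entropy

qid = dns.entropy.random_16()        # e.g. a 16-bit DNS query id
assert 0 <= qid <= 65535
roll = dns.entropy.between(1, 6)     # inclusive on both ends
assert 1 <= roll <= 6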
|
lib/dns/exception.py (new file, 124 additions)
@@ -0,0 +1,124 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Common DNS Exceptions."""
|
||||
|
||||
|
||||
class DNSException(Exception):
|
||||
|
||||
"""Abstract base class shared by all dnspython exceptions.
|
||||
|
||||
It supports two basic modes of operation:
|
||||
|
||||
a) Old/compatible mode is used if __init__ was called with
|
||||
empty **kwargs.
|
||||
In compatible mode all *args are passed to standard Python Exception class
|
||||
as before and all *args are printed by standard __str__ implementation.
|
||||
Class variable msg (or doc string if msg is None) is returned from str()
|
||||
if *args is empty.
|
||||
|
||||
b) New/parametrized mode is used if __init__ was called with
|
||||
non-empty **kwargs.
|
||||
In the new mode *args has to be empty and all kwargs have to exactly match the
|
||||
set defined in the class variable self.supp_kwargs. All kwargs are stored inside
|
||||
self.kwargs and used in new __str__ implementation to construct
|
||||
formatted message based on self.fmt string.
|
||||
|
||||
In the simplest case it is enough to override supp_kwargs and fmt
|
||||
class variables to get nice parametrized messages.
|
||||
"""
|
||||
msg = None # non-parametrized message
|
||||
supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check)
|
||||
fmt = None # message parametrized with results from _fmt_kwargs
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._check_params(*args, **kwargs)
|
||||
self._check_kwargs(**kwargs)
|
||||
self.kwargs = kwargs
|
||||
if self.msg is None:
|
||||
# the doc string is a better implicit message than an empty string
|
||||
self.msg = self.__doc__
|
||||
if args:
|
||||
super(DNSException, self).__init__(*args)
|
||||
else:
|
||||
super(DNSException, self).__init__(self.msg)
|
||||
|
||||
def _check_params(self, *args, **kwargs):
|
||||
"""Old exceptions supported only args and not kwargs.
|
||||
|
||||
For sanity we do not allow mixing old and new behavior."""
|
||||
if args or kwargs:
|
||||
assert bool(args) != bool(kwargs), \
|
||||
'keyword arguments are mutually exclusive with positional args'
|
||||
|
||||
def _check_kwargs(self, **kwargs):
|
||||
if kwargs:
|
||||
assert set(kwargs.keys()) == self.supp_kwargs, \
|
||||
'following set of keyword args is required: %s' % (
|
||||
self.supp_kwargs)
|
||||
|
||||
def _fmt_kwargs(self, **kwargs):
|
||||
"""Format kwargs before printing them.
|
||||
|
||||
Resulting dictionary has to have keys necessary for str.format call
|
||||
on fmt class variable.
|
||||
"""
|
||||
fmtargs = {}
|
||||
for kw, data in kwargs.items():
|
||||
if isinstance(data, (list, set)):
|
||||
# convert list of <someobj> to list of str(<someobj>)
|
||||
fmtargs[kw] = list(map(str, data))
|
||||
if len(fmtargs[kw]) == 1:
|
||||
# remove list brackets [] from single-item lists
|
||||
fmtargs[kw] = fmtargs[kw].pop()
|
||||
else:
|
||||
fmtargs[kw] = data
|
||||
return fmtargs
|
||||
|
||||
def __str__(self):
|
||||
if self.kwargs and self.fmt:
|
||||
# provide custom message constructed from keyword arguments
|
||||
fmtargs = self._fmt_kwargs(**self.kwargs)
|
||||
return self.fmt.format(**fmtargs)
|
||||
else:
|
||||
# print *args directly in the same way as old DNSException
|
||||
return super(DNSException, self).__str__()
|
||||
|
||||
|
||||
class FormError(DNSException):
|
||||
|
||||
"""DNS message is malformed."""
|
||||
|
||||
|
||||
class SyntaxError(DNSException):
|
||||
|
||||
"""Text input is malformed."""
|
||||
|
||||
|
||||
class UnexpectedEnd(SyntaxError):
|
||||
|
||||
"""Text input ended unexpectedly."""
|
||||
|
||||
|
||||
class TooBig(DNSException):
|
||||
|
||||
"""The DNS message is too big."""
|
||||
|
||||
|
||||
class Timeout(DNSException):
|
||||
|
||||
"""The DNS operation timed out."""
|
||||
supp_kwargs = set(['timeout'])
|
||||
fmt = "The DNS operation timed out after {timeout} seconds"
|
lib/dns/flags.py (new file, 112 additions)
@@ -0,0 +1,112 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Message Flags."""
|
||||
|
||||
# Standard DNS flags
|
||||
|
||||
QR = 0x8000
|
||||
AA = 0x0400
|
||||
TC = 0x0200
|
||||
RD = 0x0100
|
||||
RA = 0x0080
|
||||
AD = 0x0020
|
||||
CD = 0x0010
|
||||
|
||||
# EDNS flags
|
||||
|
||||
DO = 0x8000
|
||||
|
||||
_by_text = {
|
||||
'QR': QR,
|
||||
'AA': AA,
|
||||
'TC': TC,
|
||||
'RD': RD,
|
||||
'RA': RA,
|
||||
'AD': AD,
|
||||
'CD': CD
|
||||
}
|
||||
|
||||
_edns_by_text = {
|
||||
'DO': DO
|
||||
}
|
||||
|
||||
|
||||
# We construct the inverse mappings programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mappings not to be true inverses.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
_edns_by_value = dict((y, x) for x, y in _edns_by_text.items())
|
||||
|
||||
|
||||
def _order_flags(table):
|
||||
order = list(table.items())
|
||||
order.sort()
|
||||
order.reverse()
|
||||
return order
|
||||
|
||||
_flags_order = _order_flags(_by_value)
|
||||
|
||||
_edns_flags_order = _order_flags(_edns_by_value)
|
||||
|
||||
|
||||
def _from_text(text, table):
|
||||
flags = 0
|
||||
tokens = text.split()
|
||||
for t in tokens:
|
||||
flags = flags | table[t.upper()]
|
||||
return flags
|
||||
|
||||
|
||||
def _to_text(flags, table, order):
|
||||
text_flags = []
|
||||
for k, v in order:
|
||||
if flags & k != 0:
|
||||
text_flags.append(v)
|
||||
return ' '.join(text_flags)
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert a space-separated list of flag text values into a flags
|
||||
value.
|
||||
@rtype: int"""
|
||||
|
||||
return _from_text(text, _by_text)
|
||||
|
||||
|
||||
def to_text(flags):
|
||||
"""Convert a flags value into a space-separated list of flag text
|
||||
values.
|
||||
@rtype: string"""
|
||||
|
||||
return _to_text(flags, _by_value, _flags_order)
|
||||
|
||||
|
||||
def edns_from_text(text):
|
||||
"""Convert a space-separated list of EDNS flag text values into a EDNS
|
||||
flags value.
|
||||
@rtype: int"""
|
||||
|
||||
return _from_text(text, _edns_by_text)
|
||||
|
||||
|
||||
def edns_to_text(flags):
|
||||
"""Convert an EDNS flags value into a space-separated list of EDNS flag
|
||||
text values.
|
||||
@rtype: string"""
|
||||
|
||||
return _to_text(flags, _edns_by_value, _edns_flags_order)
|
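A short usage sketch for the module above; the values follow directly from the constants it defines:

import dns.flags

f = dns.flags.from_text("QR RD RA")     # 0x8000 | 0x0100 | 0x0080 == 0x8180
print(dns.flags.to_text(f))             # "QR RD RA" (ordered by descending bit value)
print(dns.flags.edns_from_text("DO"))   # 0x8000, the EDNS DO bit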
lib/dns/grange.py (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS GENERATE range conversion."""
|
||||
|
||||
import dns.exception
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert the text form of a range in a GENERATE statement to an
|
||||
integer.
|
||||
|
||||
@param text: the textual range
|
||||
@type text: string
|
||||
@return: The start, stop and step values.
|
||||
@rtype: tuple
|
||||
"""
|
||||
# TODO, figure out the bounds on start, stop and step.
|
||||
|
||||
step = 1
|
||||
cur = ''
|
||||
state = 0
|
||||
# state 0 1 2 3 4
|
||||
# x - y / z
|
||||
for c in text:
|
||||
if c == '-' and state == 0:
|
||||
start = int(cur)
|
||||
cur = ''
|
||||
state = 2
|
||||
elif c == '/':
|
||||
stop = int(cur)
|
||||
cur = ''
|
||||
state = 4
|
||||
elif c.isdigit():
|
||||
cur += c
|
||||
else:
|
||||
raise dns.exception.SyntaxError("Could not parse %s" % (c))
|
||||
|
||||
if state in (1, 3):
|
||||
raise dns.exception.SyntaxError
|
||||
|
||||
if state == 2:
|
||||
stop = int(cur)
|
||||
|
||||
if state == 4:
|
||||
step = int(cur)
|
||||
|
||||
assert step >= 1
|
||||
assert start >= 0
|
||||
assert start <= stop
|
||||
# TODO, can start == stop?
|
||||
|
||||
return (start, stop, step)
|
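A usage sketch for the range parser above, with the text forms a $GENERATE statement typically uses:

import dns.grange

print(dns.grange.from_text('1-100'))      # (1, 100, 1): step defaults to 1
print(dns.grange.from_text('32-158/16'))  # (32, 158, 16)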
lib/dns/hash.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# Copyright (C) 2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""Hashing backwards compatibility wrapper"""

import sys
import hashlib


hashes = {}
hashes['MD5'] = hashlib.md5
hashes['SHA1'] = hashlib.sha1
hashes['SHA224'] = hashlib.sha224
hashes['SHA256'] = hashlib.sha256
hashes['SHA384'] = hashlib.sha384
hashes['SHA512'] = hashlib.sha512


def get(algorithm):
    return hashes[algorithm.upper()]
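The lookup is case-insensitive because get() upper-cases its argument; a minimal usage sketch:

import dns.hash

md = dns.hash.get('sha256')()   # returns hashlib.sha256, then instantiate it
md.update(b'example.')
print(md.hexdigest())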
lib/dns/inet.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Generic Internet address helper functions."""
|
||||
|
||||
import socket
|
||||
|
||||
import dns.ipv4
|
||||
import dns.ipv6
|
||||
|
||||
|
||||
# We assume that AF_INET is always defined.
|
||||
|
||||
AF_INET = socket.AF_INET
|
||||
|
||||
# AF_INET6 might not be defined in the socket module, but we need it.
|
||||
# We'll try to use the socket module's value, and if it doesn't work,
|
||||
# we'll use our own value.
|
||||
|
||||
try:
|
||||
AF_INET6 = socket.AF_INET6
|
||||
except AttributeError:
|
||||
AF_INET6 = 9999
|
||||
|
||||
|
||||
def inet_pton(family, text):
|
||||
"""Convert the textual form of a network address into its binary form.
|
||||
|
||||
@param family: the address family
|
||||
@type family: int
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@raises NotImplementedError: the address family specified is not
|
||||
implemented.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if family == AF_INET:
|
||||
return dns.ipv4.inet_aton(text)
|
||||
elif family == AF_INET6:
|
||||
return dns.ipv6.inet_aton(text)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def inet_ntop(family, address):
|
||||
"""Convert the binary form of a network address into its textual form.
|
||||
|
||||
@param family: the address family
|
||||
@type family: int
|
||||
@param address: the binary address
|
||||
@type address: string
|
||||
@raises NotImplementedError: the address family specified is not
|
||||
implemented.
|
||||
@rtype: string
|
||||
"""
|
||||
if family == AF_INET:
|
||||
return dns.ipv4.inet_ntoa(address)
|
||||
elif family == AF_INET6:
|
||||
return dns.ipv6.inet_ntoa(address)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def af_for_address(text):
|
||||
"""Determine the address family of a textual-form network address.
|
||||
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@raises ValueError: the address family cannot be determined from the input.
|
||||
@rtype: int
|
||||
"""
|
||||
try:
|
||||
dns.ipv4.inet_aton(text)
|
||||
return AF_INET
|
||||
except:
|
||||
try:
|
||||
dns.ipv6.inet_aton(text)
|
||||
return AF_INET6
|
||||
except:
|
||||
raise ValueError
|
||||
|
||||
|
||||
def is_multicast(text):
|
||||
"""Is the textual-form network address a multicast address?
|
||||
|
||||
@param text: the textual address
|
||||
@raises ValueError: the address family cannot be determined from the input.
|
||||
@rtype: bool
|
||||
"""
|
||||
    try:
        # Index through a bytearray so this works on both Python 2 and 3:
        # on Python 3, bytes indexing already yields an int and ord() would fail.
        first = bytearray(dns.ipv4.inet_aton(text))[0]
        return (first >= 224 and first <= 239)
    except Exception:
        try:
            first = bytearray(dns.ipv6.inet_aton(text))[0]
            return (first == 255)
        except Exception:
            raise ValueError
|
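A usage sketch for the helpers above; note that in this port the to-text functions return byte strings:

import dns.inet

af = dns.inet.af_for_address('2001:db8::1')    # dns.inet.AF_INET6
wire = dns.inet.inet_pton(af, '2001:db8::1')   # 16-byte binary form
print(dns.inet.inet_ntop(af, wire))            # b'2001:db8::1'
print(dns.inet.is_multicast('224.0.0.1'))      # True: 224-239 is the IPv4 multicast range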
lib/dns/ipv4.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""IPv4 helper functions."""
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
from ._compat import binary_type
|
||||
|
||||
def inet_ntoa(address):
|
||||
"""Convert an IPv4 address in network form to text form.
|
||||
|
||||
@param address: The IPv4 address
|
||||
@type address: string
|
||||
@returns: string
|
||||
"""
|
||||
if len(address) != 4:
|
||||
raise dns.exception.SyntaxError
|
||||
if not isinstance(address, bytearray):
|
||||
address = bytearray(address)
|
||||
return (u'%u.%u.%u.%u' % (address[0], address[1],
|
||||
address[2], address[3])).encode()
|
||||
|
||||
def inet_aton(text):
|
||||
"""Convert an IPv4 address in text form to network form.
|
||||
|
||||
@param text: The IPv4 address
|
||||
@type text: string
|
||||
@returns: string
|
||||
"""
|
||||
if not isinstance(text, binary_type):
|
||||
text = text.encode()
|
||||
parts = text.split(b'.')
|
||||
if len(parts) != 4:
|
||||
raise dns.exception.SyntaxError
|
||||
for part in parts:
|
||||
if not part.isdigit():
|
||||
raise dns.exception.SyntaxError
|
||||
if len(part) > 1 and part[0:1] == b'0':
|
||||
# No leading zeros
|
||||
raise dns.exception.SyntaxError
|
||||
try:
|
||||
bytes = [int(part) for part in parts]
|
||||
return struct.pack('BBBB', *bytes)
|
||||
except:
|
||||
raise dns.exception.SyntaxError
|
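A round-trip sketch for the two converters above:

import dns.ipv4

wire = dns.ipv4.inet_aton('192.0.2.1')   # b'\xc0\x00\x02\x01'
print(dns.ipv4.inet_ntoa(wire))          # b'192.0.2.1' (a byte string in this port)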
lib/dns/ipv6.py (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""IPv6 helper functions."""
|
||||
|
||||
import re
|
||||
import binascii
|
||||
|
||||
import dns.exception
|
||||
import dns.ipv4
|
||||
from ._compat import xrange, binary_type
|
||||
|
||||
_leading_zero = re.compile(b'0+([0-9a-f]+)')
|
||||
|
||||
def inet_ntoa(address):
|
||||
"""Convert a network format IPv6 address into text.
|
||||
|
||||
@param address: the binary address
|
||||
@type address: string
|
||||
@rtype: string
|
||||
@raises ValueError: the address isn't 16 bytes long
|
||||
"""
|
||||
|
||||
if len(address) != 16:
|
||||
raise ValueError("IPv6 addresses are 16 bytes long")
|
||||
hex = binascii.hexlify(address)
|
||||
chunks = []
|
||||
i = 0
|
||||
l = len(hex)
|
||||
while i < l:
|
||||
chunk = hex[i : i + 4]
|
||||
# strip leading zeros. we do this with an re instead of
|
||||
# with lstrip() because lstrip() didn't support chars until
|
||||
# python 2.2.2
|
||||
m = _leading_zero.match(chunk)
|
||||
if not m is None:
|
||||
chunk = m.group(1)
|
||||
chunks.append(chunk)
|
||||
i += 4
|
||||
#
|
||||
# Compress the longest subsequence of 0-value chunks to ::
|
||||
#
|
||||
best_start = 0
|
||||
best_len = 0
|
||||
start = -1
|
||||
last_was_zero = False
|
||||
for i in xrange(8):
|
||||
if chunks[i] != b'0':
|
||||
if last_was_zero:
|
||||
end = i
|
||||
current_len = end - start
|
||||
if current_len > best_len:
|
||||
best_start = start
|
||||
best_len = current_len
|
||||
last_was_zero = False
|
||||
elif not last_was_zero:
|
||||
start = i
|
||||
last_was_zero = True
|
||||
if last_was_zero:
|
||||
end = 8
|
||||
current_len = end - start
|
||||
if current_len > best_len:
|
||||
best_start = start
|
||||
best_len = current_len
|
||||
if best_len > 1:
|
||||
if best_start == 0 and \
|
||||
(best_len == 6 or
|
||||
best_len == 5 and chunks[5] == b'ffff'):
|
||||
# We have an embedded IPv4 address
|
||||
if best_len == 6:
|
||||
prefix = b'::'
|
||||
else:
|
||||
prefix = b'::ffff:'
|
||||
hex = prefix + dns.ipv4.inet_ntoa(address[12:])
|
||||
else:
|
||||
hex = b':'.join(chunks[:best_start]) + b'::' + \
|
||||
b':'.join(chunks[best_start + best_len:])
|
||||
else:
|
||||
hex = b':'.join(chunks)
|
||||
return hex
|
||||
|
||||
_v4_ending = re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$')
|
||||
_colon_colon_start = re.compile(b'::.*')
|
||||
_colon_colon_end = re.compile(b'.*::$')
|
||||
|
||||
def inet_aton(text):
|
||||
"""Convert a text format IPv6 address into network format.
|
||||
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@rtype: string
|
||||
@raises dns.exception.SyntaxError: the text was not properly formatted
|
||||
"""
|
||||
|
||||
#
|
||||
# Our aim here is not something fast; we just want something that works.
|
||||
#
|
||||
if not isinstance(text, binary_type):
|
||||
text = text.encode()
|
||||
|
||||
if text == b'::':
|
||||
text = b'0::'
|
||||
#
|
||||
# Get rid of the icky dot-quad syntax if we have it.
|
||||
#
|
||||
m = _v4_ending.match(text)
|
||||
if not m is None:
|
||||
b = bytearray(dns.ipv4.inet_aton(m.group(2)))
|
||||
text = (u"%s:%02x%02x:%02x%02x" % (m.group(1).decode(), b[0], b[1],
|
||||
b[2], b[3])).encode()
|
||||
#
|
||||
# Try to turn '::<whatever>' into ':<whatever>'; if no match try to
|
||||
# turn '<whatever>::' into '<whatever>:'
|
||||
#
|
||||
m = _colon_colon_start.match(text)
|
||||
if not m is None:
|
||||
text = text[1:]
|
||||
else:
|
||||
m = _colon_colon_end.match(text)
|
||||
if not m is None:
|
||||
text = text[:-1]
|
||||
#
|
||||
# Now canonicalize into 8 chunks of 4 hex digits each
|
||||
#
|
||||
chunks = text.split(b':')
|
||||
l = len(chunks)
|
||||
if l > 8:
|
||||
raise dns.exception.SyntaxError
|
||||
seen_empty = False
|
||||
canonical = []
|
||||
for c in chunks:
|
||||
if c == b'':
|
||||
if seen_empty:
|
||||
raise dns.exception.SyntaxError
|
||||
seen_empty = True
|
||||
for i in xrange(0, 8 - l + 1):
|
||||
canonical.append(b'0000')
|
||||
else:
|
||||
lc = len(c)
|
||||
if lc > 4:
|
||||
raise dns.exception.SyntaxError
|
||||
if lc != 4:
|
||||
c = (b'0' * (4 - lc)) + c
|
||||
canonical.append(c)
|
||||
if l < 8 and not seen_empty:
|
||||
raise dns.exception.SyntaxError
|
||||
text = b''.join(canonical)
|
||||
|
||||
#
|
||||
# Finally we can go to binary.
|
||||
#
|
||||
try:
|
||||
return binascii.unhexlify(text)
|
||||
except (binascii.Error, TypeError):
|
||||
raise dns.exception.SyntaxError
|
||||
|
||||
_mapped_prefix = b'\x00' * 10 + b'\xff\xff'
|
||||
|
||||
def is_mapped(address):
|
||||
return address.startswith(_mapped_prefix)
|
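A usage sketch showing the zero-run compression and the mapped-address check defined above:

import dns.ipv6

wire = dns.ipv6.inet_aton('2001:0db8:0000:0000:0000:0000:0000:0001')
print(dns.ipv6.inet_ntoa(wire))    # b'2001:db8::1' (the longest zero run becomes '::')
mapped = dns.ipv6.inet_aton('::ffff:192.0.2.1')
print(dns.ipv6.is_mapped(mapped))  # True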
lib/dns/message.py (new file, 1153 lines)
File diff suppressed because it is too large
lib/dns/name.py (new file, 763 lines)
@@ -0,0 +1,763 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Names.
|
||||
|
||||
@var root: The DNS root name.
|
||||
@type root: dns.name.Name object
|
||||
@var empty: The empty DNS name.
|
||||
@type empty: dns.name.Name object
|
||||
"""
|
||||
|
||||
from io import BytesIO
|
||||
import struct
|
||||
import sys
|
||||
import copy
|
||||
import encodings.idna
|
||||
|
||||
import dns.exception
|
||||
import dns.wiredata
|
||||
|
||||
from ._compat import long, binary_type, text_type, unichr
|
||||
|
||||
try:
    maxint = sys.maxint
except AttributeError:
    # Python 3 has no sys.maxint; fall back to the platform word size.
    maxint = (1 << (8 * struct.calcsize("P"))) // 2 - 1
|
||||
|
||||
NAMERELN_NONE = 0
|
||||
NAMERELN_SUPERDOMAIN = 1
|
||||
NAMERELN_SUBDOMAIN = 2
|
||||
NAMERELN_EQUAL = 3
|
||||
NAMERELN_COMMONANCESTOR = 4
|
||||
|
||||
|
||||
class EmptyLabel(dns.exception.SyntaxError):
|
||||
|
||||
"""A DNS label is empty."""
|
||||
|
||||
|
||||
class BadEscape(dns.exception.SyntaxError):
|
||||
|
||||
"""An escaped code in a text format of DNS name is invalid."""
|
||||
|
||||
|
||||
class BadPointer(dns.exception.FormError):
|
||||
|
||||
"""A DNS compression pointer points forward instead of backward."""
|
||||
|
||||
|
||||
class BadLabelType(dns.exception.FormError):
|
||||
|
||||
"""The label type in DNS name wire format is unknown."""
|
||||
|
||||
|
||||
class NeedAbsoluteNameOrOrigin(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to convert a non-absolute name to
|
||||
wire when there was also a non-absolute (or missing) origin."""
|
||||
|
||||
|
||||
class NameTooLong(dns.exception.FormError):
|
||||
|
||||
"""A DNS name is > 255 octets long."""
|
||||
|
||||
|
||||
class LabelTooLong(dns.exception.SyntaxError):
|
||||
|
||||
"""A DNS label is > 63 octets long."""
|
||||
|
||||
|
||||
class AbsoluteConcatenation(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to append anything other than the
|
||||
empty name to an absolute DNS name."""
|
||||
|
||||
|
||||
class NoParent(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to get the parent of the root name
|
||||
or the empty name."""
|
||||
|
||||
_escaped = bytearray(b'"().;\\@$')
|
||||
|
||||
|
||||
def _escapify(label, unicode_mode=False):
|
||||
"""Escape the characters in label which need it.
|
||||
@param unicode_mode: escapify only special and whitespace (<= 0x20)
|
||||
characters
|
||||
@returns: the escaped string
|
||||
@rtype: string"""
|
||||
if not unicode_mode:
|
||||
text = ''
|
||||
if isinstance(label, text_type):
|
||||
label = label.encode()
|
||||
for c in bytearray(label):
|
||||
packed = struct.pack('!B', c).decode()
|
||||
if c in _escaped:
|
||||
text += '\\' + packed
|
||||
elif c > 0x20 and c < 0x7F:
|
||||
text += packed
|
||||
else:
|
||||
text += '\\%03d' % c
|
||||
return text.encode()
|
||||
|
||||
text = u''
|
||||
if isinstance(label, binary_type):
|
||||
label = label.decode()
|
||||
for c in label:
|
||||
if c > u'\x20' and c < u'\x7f':
|
||||
text += c
|
||||
else:
|
||||
if c >= u'\x7f':
|
||||
text += c
|
||||
else:
|
||||
text += u'\\%03d' % c
|
||||
return text
|
||||
|
||||
|
||||
def _validate_labels(labels):
|
||||
"""Check for empty labels in the middle of a label sequence,
|
||||
labels that are too long, and for too many labels.
|
||||
@raises NameTooLong: the name as a whole is too long
|
||||
@raises EmptyLabel: a label is empty (i.e. the root label) and appears
|
||||
in a position other than the end of the label sequence"""
|
||||
|
||||
l = len(labels)
|
||||
total = 0
|
||||
i = -1
|
||||
j = 0
|
||||
for label in labels:
|
||||
ll = len(label)
|
||||
total += ll + 1
|
||||
if ll > 63:
|
||||
raise LabelTooLong
|
||||
if i < 0 and label == b'':
|
||||
i = j
|
||||
j += 1
|
||||
if total > 255:
|
||||
raise NameTooLong
|
||||
if i >= 0 and i != l - 1:
|
||||
raise EmptyLabel
|
||||
|
||||
|
||||
def _ensure_bytes(label):
|
||||
if isinstance(label, binary_type):
|
||||
return label
|
||||
if isinstance(label, text_type):
|
||||
return label.encode()
|
||||
raise ValueError
|
||||
|
||||
|
||||
class Name(object):
|
||||
|
||||
"""A DNS name.
|
||||
|
||||
The dns.name.Name class represents a DNS name as a tuple of labels.
|
||||
Instances of the class are immutable.
|
||||
|
||||
@ivar labels: The tuple of labels in the name. Each label is a string of
|
||||
up to 63 octets."""
|
||||
|
||||
__slots__ = ['labels']
|
||||
|
||||
def __init__(self, labels):
|
||||
"""Initialize a domain name from a list of labels.
|
||||
@param labels: the labels
|
||||
@type labels: any iterable whose values are strings
|
||||
"""
|
||||
labels = [_ensure_bytes(x) for x in labels]
|
||||
super(Name, self).__setattr__('labels', tuple(labels))
|
||||
_validate_labels(self.labels)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise TypeError("object doesn't support attribute assignment")
|
||||
|
||||
def __copy__(self):
|
||||
return Name(self.labels)
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
return Name(copy.deepcopy(self.labels, memo))
|
||||
|
||||
def __getstate__(self):
|
||||
return {'labels': self.labels}
|
||||
|
||||
def __setstate__(self, state):
|
||||
super(Name, self).__setattr__('labels', state['labels'])
|
||||
_validate_labels(self.labels)
|
||||
|
||||
def is_absolute(self):
|
||||
"""Is the most significant label of this name the root label?
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
return len(self.labels) > 0 and self.labels[-1] == b''
|
||||
|
||||
def is_wild(self):
|
||||
"""Is this name wild? (I.e. Is the least significant label '*'?)
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
return len(self.labels) > 0 and self.labels[0] == b'*'
|
||||
|
||||
def __hash__(self):
|
||||
"""Return a case-insensitive hash of the name.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
h = long(0)
|
||||
for label in self.labels:
|
||||
for c in bytearray(label.lower()):
|
||||
h += (h << 3) + c
|
||||
return int(h % maxint)
|
||||
|
||||
def fullcompare(self, other):
|
||||
"""Compare two names, returning a 3-tuple (relation, order, nlabels).
|
||||
|
||||
I{relation} describes the relationship between the names,
|
||||
and is one of: dns.name.NAMERELN_NONE,
|
||||
dns.name.NAMERELN_SUPERDOMAIN, dns.name.NAMERELN_SUBDOMAIN,
|
||||
dns.name.NAMERELN_EQUAL, or dns.name.NAMERELN_COMMONANCESTOR
|
||||
|
||||
I{order} is < 0 if self < other, > 0 if self > other, and ==
|
||||
0 if self == other. A relative name is always less than an
|
||||
absolute name. If both names have the same relativity, then
|
||||
the DNSSEC order relation is used to order them.
|
||||
|
||||
I{nlabels} is the number of significant labels that the two names
|
||||
have in common.
|
||||
"""
|
||||
|
||||
sabs = self.is_absolute()
|
||||
oabs = other.is_absolute()
|
||||
if sabs != oabs:
|
||||
if sabs:
|
||||
return (NAMERELN_NONE, 1, 0)
|
||||
else:
|
||||
return (NAMERELN_NONE, -1, 0)
|
||||
l1 = len(self.labels)
|
||||
l2 = len(other.labels)
|
||||
ldiff = l1 - l2
|
||||
if ldiff < 0:
|
||||
l = l1
|
||||
else:
|
||||
l = l2
|
||||
|
||||
order = 0
|
||||
nlabels = 0
|
||||
namereln = NAMERELN_NONE
|
||||
while l > 0:
|
||||
l -= 1
|
||||
l1 -= 1
|
||||
l2 -= 1
|
||||
label1 = self.labels[l1].lower()
|
||||
label2 = other.labels[l2].lower()
|
||||
if label1 < label2:
|
||||
order = -1
|
||||
if nlabels > 0:
|
||||
namereln = NAMERELN_COMMONANCESTOR
|
||||
return (namereln, order, nlabels)
|
||||
elif label1 > label2:
|
||||
order = 1
|
||||
if nlabels > 0:
|
||||
namereln = NAMERELN_COMMONANCESTOR
|
||||
return (namereln, order, nlabels)
|
||||
nlabels += 1
|
||||
order = ldiff
|
||||
if ldiff < 0:
|
||||
namereln = NAMERELN_SUPERDOMAIN
|
||||
elif ldiff > 0:
|
||||
namereln = NAMERELN_SUBDOMAIN
|
||||
else:
|
||||
namereln = NAMERELN_EQUAL
|
||||
return (namereln, order, nlabels)
|
||||
|
||||
def is_subdomain(self, other):
|
||||
"""Is self a subdomain of other?
|
||||
|
||||
The notion of subdomain includes equality.
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
(nr, o, nl) = self.fullcompare(other)
|
||||
if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_superdomain(self, other):
|
||||
"""Is self a superdomain of other?
|
||||
|
||||
The notion of superdomain includes equality.
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
(nr, o, nl) = self.fullcompare(other)
|
||||
if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
|
||||
return True
|
||||
return False
|
||||
|
||||
def canonicalize(self):
|
||||
"""Return a name which is equal to the current name, but is in
|
||||
DNSSEC canonical form.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
return Name([x.lower() for x in self.labels])
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] == 0
|
||||
else:
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] != 0
|
||||
else:
|
||||
return True
|
||||
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] < 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __le__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] <= 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ge__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] >= 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] > 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __repr__(self):
|
||||
return '<DNS name ' + self.__str__() + '>'
|
||||
|
||||
def __str__(self):
|
||||
return self.to_text(False).decode()
|
||||
|
||||
def to_text(self, omit_final_dot=False):
|
||||
"""Convert name to text format.
|
||||
@param omit_final_dot: If True, don't emit the final dot (denoting the
|
||||
root label) for absolute names. The default is False.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if len(self.labels) == 0:
|
||||
return b'@'
|
||||
if len(self.labels) == 1 and self.labels[0] == b'':
|
||||
return b'.'
|
||||
if omit_final_dot and self.is_absolute():
|
||||
l = self.labels[:-1]
|
||||
else:
|
||||
l = self.labels
|
||||
s = b'.'.join(map(_escapify, l))
|
||||
return s
|
||||
|
||||
def to_unicode(self, omit_final_dot=False):
|
||||
"""Convert name to Unicode text format.
|
||||
|
||||
IDN ACE labels are converted to Unicode.
|
||||
|
||||
@param omit_final_dot: If True, don't emit the final dot (denoting the
|
||||
root label) for absolute names. The default is False.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if len(self.labels) == 0:
|
||||
return u'@'
|
||||
if len(self.labels) == 1 and self.labels[0] == b'':
|
||||
return u'.'
|
||||
if omit_final_dot and self.is_absolute():
|
||||
l = self.labels[:-1]
|
||||
else:
|
||||
l = self.labels
|
||||
s = u'.'.join([_escapify(encodings.idna.ToUnicode(x), True)
|
||||
for x in l])
|
||||
return s
|
||||
|
||||
def to_digestable(self, origin=None):
|
||||
"""Convert name to a format suitable for digesting in hashes.
|
||||
|
||||
The name is canonicalized and converted to uncompressed wire format.
|
||||
|
||||
@param origin: If the name is relative and origin is not None, then
|
||||
origin will be appended to it.
|
||||
@type origin: dns.name.Name object
|
||||
@raises NeedAbsoluteNameOrOrigin: All names in wire format are
|
||||
absolute. If self is a relative name, then an origin must be supplied;
|
||||
if it is missing, then this exception is raised
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if not self.is_absolute():
|
||||
if origin is None or not origin.is_absolute():
|
||||
raise NeedAbsoluteNameOrOrigin
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(origin.labels))
|
||||
else:
|
||||
labels = self.labels
|
||||
dlabels = [struct.pack('!B%ds' % len(x), len(x), x.lower())
|
||||
for x in labels]
|
||||
return b''.join(dlabels)
|
||||
|
||||
def to_wire(self, file=None, compress=None, origin=None):
|
||||
"""Convert name to wire format, possibly compressing it.
|
||||
|
||||
@param file: the file where the name is emitted (typically
|
||||
a BytesIO file). If None, a string containing the wire name
|
||||
will be returned.
|
||||
@type file: file or None
|
||||
@param compress: The compression table. If None (the default) names
|
||||
will not be compressed.
|
||||
@type compress: dict
|
||||
@param origin: If the name is relative and origin is not None, then
|
||||
origin will be appended to it.
|
||||
@type origin: dns.name.Name object
|
||||
@raises NeedAbsoluteNameOrOrigin: All names in wire format are
|
||||
absolute. If self is a relative name, then an origin must be supplied;
|
||||
if it is missing, then this exception is raised
|
||||
"""
|
||||
|
||||
if file is None:
|
||||
file = BytesIO()
|
||||
want_return = True
|
||||
else:
|
||||
want_return = False
|
||||
|
||||
if not self.is_absolute():
|
||||
if origin is None or not origin.is_absolute():
|
||||
raise NeedAbsoluteNameOrOrigin
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(origin.labels))
|
||||
else:
|
||||
labels = self.labels
|
||||
i = 0
|
||||
for label in labels:
|
||||
n = Name(labels[i:])
|
||||
i += 1
|
||||
if compress is not None:
|
||||
pos = compress.get(n)
|
||||
else:
|
||||
pos = None
|
||||
if pos is not None:
|
||||
value = 0xc000 + pos
|
||||
s = struct.pack('!H', value)
|
||||
file.write(s)
|
||||
break
|
||||
else:
|
||||
if compress is not None and len(n) > 1:
|
||||
pos = file.tell()
|
||||
if pos <= 0x3fff:
|
||||
compress[n] = pos
|
||||
l = len(label)
|
||||
file.write(struct.pack('!B', l))
|
||||
if l > 0:
|
||||
file.write(label)
|
||||
if want_return:
|
||||
return file.getvalue()
|
||||
|
||||
def __len__(self):
|
||||
"""The length of the name (in labels).
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return len(self.labels)
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self.labels[index]
|
||||
|
||||
def __getslice__(self, start, stop):
|
||||
return self.labels[start:stop]
|
||||
|
||||
def __add__(self, other):
|
||||
return self.concatenate(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
return self.relativize(other)
|
||||
|
||||
def split(self, depth):
|
||||
"""Split a name into a prefix and suffix at depth.
|
||||
|
||||
@param depth: the number of labels in the suffix
|
||||
@type depth: int
|
||||
@raises ValueError: the depth was not >= 0 and <= the length of the
|
||||
name.
|
||||
@returns: the tuple (prefix, suffix)
|
||||
@rtype: tuple
|
||||
"""
|
||||
|
||||
l = len(self.labels)
|
||||
if depth == 0:
|
||||
return (self, dns.name.empty)
|
||||
elif depth == l:
|
||||
return (dns.name.empty, self)
|
||||
elif depth < 0 or depth > l:
|
||||
raise ValueError(
|
||||
'depth must be >= 0 and <= the length of the name')
|
||||
return (Name(self[: -depth]), Name(self[-depth:]))
|
||||
|
||||
def concatenate(self, other):
|
||||
"""Return a new name which is the concatenation of self and other.
|
||||
@rtype: dns.name.Name object
|
||||
@raises AbsoluteConcatenation: self is absolute and other is
|
||||
not the empty name
|
||||
"""
|
||||
|
||||
if self.is_absolute() and len(other) > 0:
|
||||
raise AbsoluteConcatenation
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(other.labels))
|
||||
return Name(labels)
|
||||
|
||||
def relativize(self, origin):
|
||||
"""If self is a subdomain of origin, return a new name which is self
|
||||
relative to origin. Otherwise return self.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if origin is not None and self.is_subdomain(origin):
|
||||
return Name(self[: -len(origin)])
|
||||
else:
|
||||
return self
|
||||
|
||||
def derelativize(self, origin):
|
||||
"""If self is a relative name, return a new name which is the
|
||||
concatenation of self and origin. Otherwise return self.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if not self.is_absolute():
|
||||
return self.concatenate(origin)
|
||||
else:
|
||||
return self
|
||||
|
||||
def choose_relativity(self, origin=None, relativize=True):
|
||||
"""Return a name with the relativity desired by the caller. If
|
||||
origin is None, then self is returned. Otherwise, if
|
||||
relativize is true the name is relativized, and if relativize is
|
||||
false the name is derelativized.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if origin:
|
||||
if relativize:
|
||||
return self.relativize(origin)
|
||||
else:
|
||||
return self.derelativize(origin)
|
||||
else:
|
||||
return self
|
||||
|
||||
def parent(self):
|
||||
"""Return the parent of the name.
|
||||
@rtype: dns.name.Name object
|
||||
@raises NoParent: the name is either the root name or the empty name,
|
||||
and thus has no parent.
|
||||
"""
|
||||
if self == root or self == empty:
|
||||
raise NoParent
|
||||
return Name(self.labels[1:])
|
||||
|
||||
root = Name([b''])
|
||||
empty = Name([])
|
||||
|
||||
|
||||
def from_unicode(text, origin=root):
|
||||
"""Convert unicode text into a Name object.
|
||||
|
||||
Labels are encoded in IDN ACE form.
|
||||
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if not isinstance(text, text_type):
|
||||
raise ValueError("input to from_unicode() must be a unicode string")
|
||||
if not (origin is None or isinstance(origin, Name)):
|
||||
raise ValueError("origin must be a Name or None")
|
||||
labels = []
|
||||
label = u''
|
||||
escaping = False
|
||||
edigits = 0
|
||||
total = 0
|
||||
if text == u'@':
|
||||
text = u''
|
||||
if text:
|
||||
if text == u'.':
|
||||
return Name([b'']) # no Unicode "u" on this constant!
|
||||
for c in text:
|
||||
if escaping:
|
||||
if edigits == 0:
|
||||
if c.isdigit():
|
||||
total = int(c)
|
||||
edigits += 1
|
||||
else:
|
||||
label += c
|
||||
escaping = False
|
||||
else:
|
||||
if not c.isdigit():
|
||||
raise BadEscape
|
||||
total *= 10
|
||||
total += int(c)
|
||||
edigits += 1
|
||||
if edigits == 3:
|
||||
escaping = False
|
||||
label += unichr(total)
|
||||
elif c in [u'.', u'\u3002', u'\uff0e', u'\uff61']:
|
||||
if len(label) == 0:
|
||||
raise EmptyLabel
|
||||
try:
|
||||
labels.append(encodings.idna.ToASCII(label))
|
||||
except UnicodeError:
|
||||
raise LabelTooLong
|
||||
label = u''
|
||||
elif c == u'\\':
|
||||
escaping = True
|
||||
edigits = 0
|
||||
total = 0
|
||||
else:
|
||||
label += c
|
||||
if escaping:
|
||||
raise BadEscape
|
||||
if len(label) > 0:
|
||||
try:
|
||||
labels.append(encodings.idna.ToASCII(label))
|
||||
except UnicodeError:
|
||||
raise LabelTooLong
|
||||
else:
|
||||
labels.append(b'')
|
||||
|
||||
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
|
||||
labels.extend(list(origin.labels))
|
||||
return Name(labels)
|
||||
|
||||
|
||||
def from_text(text, origin=root):
|
||||
"""Convert text into a Name object.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if isinstance(text, text_type):
|
||||
return from_unicode(text, origin)
|
||||
if not isinstance(text, binary_type):
|
||||
raise ValueError("input to from_text() must be a string")
|
||||
if not (origin is None or isinstance(origin, Name)):
|
||||
raise ValueError("origin must be a Name or None")
|
||||
labels = []
|
||||
label = b''
|
||||
escaping = False
|
||||
edigits = 0
|
||||
total = 0
|
||||
if text == b'@':
|
||||
text = b''
|
||||
if text:
|
||||
if text == b'.':
|
||||
return Name([b''])
|
||||
for c in bytearray(text):
|
||||
byte_ = struct.pack('!B', c)
|
||||
if escaping:
|
||||
if edigits == 0:
|
||||
if byte_.isdigit():
|
||||
total = int(byte_)
|
||||
edigits += 1
|
||||
else:
|
||||
label += byte_
|
||||
escaping = False
|
||||
else:
|
||||
if not byte_.isdigit():
|
||||
raise BadEscape
|
||||
total *= 10
|
||||
total += int(byte_)
|
||||
edigits += 1
|
||||
if edigits == 3:
|
||||
escaping = False
|
||||
label += struct.pack('!B', total)
|
||||
elif byte_ == b'.':
|
||||
if len(label) == 0:
|
||||
raise EmptyLabel
|
||||
labels.append(label)
|
||||
label = b''
|
||||
elif byte_ == b'\\':
|
||||
escaping = True
|
||||
edigits = 0
|
||||
total = 0
|
||||
else:
|
||||
label += byte_
|
||||
if escaping:
|
||||
raise BadEscape
|
||||
if len(label) > 0:
|
||||
labels.append(label)
|
||||
else:
|
||||
labels.append(b'')
|
||||
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
|
||||
labels.extend(list(origin.labels))
|
||||
return Name(labels)
|
||||
|
||||
|
||||
def from_wire(message, current):
|
||||
"""Convert possibly compressed wire format into a Name.
|
||||
@param message: the entire DNS message
|
||||
@type message: string
|
||||
@param current: the offset of the beginning of the name from the start
|
||||
of the message
|
||||
@type current: int
|
||||
@raises dns.name.BadPointer: a compression pointer did not point backwards
|
||||
in the message
|
||||
@raises dns.name.BadLabelType: an invalid label type was encountered.
|
||||
@returns: a tuple consisting of the name that was read and the number
|
||||
of bytes of the wire format message which were consumed reading it
|
||||
@rtype: (dns.name.Name object, int) tuple
|
||||
"""
|
||||
|
||||
if not isinstance(message, binary_type):
|
||||
raise ValueError("input to from_wire() must be a byte string")
|
||||
message = dns.wiredata.maybe_wrap(message)
|
||||
labels = []
|
||||
biggest_pointer = current
|
||||
hops = 0
|
||||
count = message[current]
|
||||
current += 1
|
||||
cused = 1
|
||||
while count != 0:
|
||||
if count < 64:
|
||||
labels.append(message[current: current + count].unwrap())
|
||||
current += count
|
||||
if hops == 0:
|
||||
cused += count
|
||||
elif count >= 192:
|
||||
current = (count & 0x3f) * 256 + message[current]
|
||||
if hops == 0:
|
||||
cused += 1
|
||||
if current >= biggest_pointer:
|
||||
raise BadPointer
|
||||
biggest_pointer = current
|
||||
hops += 1
|
||||
else:
|
||||
raise BadLabelType
|
||||
count = message[current]
|
||||
current += 1
|
||||
if hops == 0:
|
||||
cused += 1
|
||||
labels.append(b'')
|
||||
return (Name(labels), cused)
|
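A small sketch of the Name API defined above; absolute names carry the trailing root label:

import dns.name

n = dns.name.from_text('www.example.com')   # absolute: origin defaults to the root
origin = dns.name.from_text('example.com')
print(n.is_subdomain(origin))                # True
rel = n.relativize(origin)                   # <DNS name www>
print(rel.derelativize(origin) == n)         # True
print(n.to_text(omit_final_dot=True))        # b'www.example.com'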
lib/dns/namedict.py (new file, 104 lines)
@@ -0,0 +1,104 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
# Copyright (C) 2016 Coresec Systems AB
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL
|
||||
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC
|
||||
# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
|
||||
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
|
||||
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS name dictionary"""
|
||||
|
||||
import collections
|
||||
import dns.name
|
||||
from ._compat import xrange
|
||||
|
||||
|
||||
class NameDict(collections.MutableMapping):
|
||||
|
||||
"""A dictionary whose keys are dns.name.Name objects.
|
||||
@ivar max_depth: the maximum depth of the keys that have ever been
|
||||
added to the dictionary.
|
||||
@type max_depth: int
|
||||
@ivar max_depth_items: the number of items of maximum depth
|
||||
@type max_depth_items: int
|
||||
"""
|
||||
|
||||
__slots__ = ["max_depth", "max_depth_items", "__store"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.__store = dict()
|
||||
self.max_depth = 0
|
||||
self.max_depth_items = 0
|
||||
self.update(dict(*args, **kwargs))
|
||||
|
||||
def __update_max_depth(self, key):
|
||||
if len(key) == self.max_depth:
|
||||
self.max_depth_items = self.max_depth_items + 1
|
||||
elif len(key) > self.max_depth:
|
||||
self.max_depth = len(key)
|
||||
self.max_depth_items = 1
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.__store[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if not isinstance(key, dns.name.Name):
|
||||
raise ValueError('NameDict key must be a name')
|
||||
self.__store[key] = value
|
||||
self.__update_max_depth(key)
|
||||
|
||||
def __delitem__(self, key):
|
||||
value = self.__store.pop(key)
|
||||
if len(value) == self.max_depth:
|
||||
self.max_depth_items = self.max_depth_items - 1
|
||||
if self.max_depth_items == 0:
|
||||
self.max_depth = 0
|
||||
for k in self.__store:
|
||||
self.__update_max_depth(k)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__store)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.__store)
|
||||
|
||||
def has_key(self, key):
|
||||
return key in self.__store
|
||||
|
||||
def get_deepest_match(self, name):
|
||||
"""Find the deepest match to I{name} in the dictionary.
|
||||
|
||||
The deepest match is the longest name in the dictionary which is
|
||||
a superdomain of I{name}.
|
||||
|
||||
@param name: the name
|
||||
@type name: dns.name.Name object
|
||||
@rtype: (key, value) tuple
|
||||
"""
|
||||
|
||||
depth = len(name)
|
||||
if depth > self.max_depth:
|
||||
depth = self.max_depth
|
||||
for i in xrange(-depth, 0):
|
||||
n = dns.name.Name(name[i:])
|
||||
if n in self:
|
||||
return (n, self[n])
|
||||
v = self[dns.name.empty]
|
||||
return (dns.name.empty, v)
|
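A sketch of the longest-suffix lookup that get_deepest_match() provides; the stored values here are arbitrary placeholders:

import dns.name
import dns.namedict

d = dns.namedict.NameDict()
d[dns.name.from_text('example.com')] = 'zone apex'
d[dns.name.from_text('sub.example.com')] = 'delegation'

key, value = d.get_deepest_match(dns.name.from_text('www.sub.example.com'))
print(key, value)   # sub.example.com. delegation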
lib/dns/node.py (new file, 178 lines)
@@ -0,0 +1,178 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS nodes. A node is a set of rdatasets."""
|
||||
|
||||
from io import StringIO
|
||||
|
||||
import dns.rdataset
|
||||
import dns.rdatatype
|
||||
import dns.renderer
|
||||
|
||||
|
||||
class Node(object):
|
||||
|
||||
"""A DNS node.
|
||||
|
||||
A node is a set of rdatasets
|
||||
|
||||
@ivar rdatasets: the node's rdatasets
|
||||
@type rdatasets: list of dns.rdataset.Rdataset objects"""
|
||||
|
||||
__slots__ = ['rdatasets']
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize a DNS node.
|
||||
"""
|
||||
|
||||
self.rdatasets = []
|
||||
|
||||
def to_text(self, name, **kw):
|
||||
"""Convert a node to text format.
|
||||
|
||||
Each rdataset at the node is printed. Any keyword arguments
|
||||
to this method are passed on to the rdataset's to_text() method.
|
||||
@param name: the owner name of the rdatasets
|
||||
@type name: dns.name.Name object
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
s = StringIO()
|
||||
for rds in self.rdatasets:
|
||||
if len(rds) > 0:
|
||||
s.write(rds.to_text(name, **kw))
|
||||
s.write(u'\n')
|
||||
return s.getvalue()[:-1]
|
||||
|
||||
def __repr__(self):
|
||||
return '<DNS node ' + str(id(self)) + '>'
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Two nodes are equal if they have the same rdatasets.
|
||||
|
||||
@rtype: bool
|
||||
"""
|
||||
#
|
||||
# This is inefficient. Good thing we don't need to do it much.
|
||||
#
|
||||
for rd in self.rdatasets:
|
||||
if rd not in other.rdatasets:
|
||||
return False
|
||||
for rd in other.rdatasets:
|
||||
if rd not in self.rdatasets:
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.rdatasets)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.rdatasets)
|
||||
|
||||
def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
|
||||
create=False):
|
||||
"""Find an rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type. Usually this value is
|
||||
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
|
||||
dns.rdatatype.RRSIG, then the covers value will be the rdata
|
||||
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
|
||||
types as if they were a family of
|
||||
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
|
||||
easier to work with than if RRSIGs covering different rdata
|
||||
types were aggregated into a single RRSIG rdataset.
|
||||
@type covers: int
|
||||
@param create: If True, create the rdataset if it is not found.
|
||||
@type create: bool
|
||||
@raises KeyError: An rdataset of the desired type and class does
|
||||
not exist and I{create} is not True.
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
for rds in self.rdatasets:
|
||||
if rds.match(rdclass, rdtype, covers):
|
||||
return rds
|
||||
if not create:
|
||||
raise KeyError
|
||||
rds = dns.rdataset.Rdataset(rdclass, rdtype)
|
||||
self.rdatasets.append(rds)
|
||||
return rds
|
||||
|
||||
def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
|
||||
create=False):
|
||||
"""Get an rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
None is returned if an rdataset of the specified type and
|
||||
class does not exist and I{create} is not True.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type.
|
||||
@type covers: int
|
||||
@param create: If True, create the rdataset if it is not found.
|
||||
@type create: bool
|
||||
@rtype: dns.rdataset.Rdataset object or None
|
||||
"""
|
||||
|
||||
try:
|
||||
rds = self.find_rdataset(rdclass, rdtype, covers, create)
|
||||
except KeyError:
|
||||
rds = None
|
||||
return rds
|
||||
|
||||
def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
|
||||
"""Delete the rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
If a matching rdataset does not exist, it is not an error.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type.
|
||||
@type covers: int
|
||||
"""
|
||||
|
||||
rds = self.get_rdataset(rdclass, rdtype, covers)
|
||||
if rds is not None:
|
||||
self.rdatasets.remove(rds)
|
||||
|
||||
def replace_rdataset(self, replacement):
|
||||
"""Replace an rdataset.
|
||||
|
||||
It is not an error if there is no rdataset matching I{replacement}.
|
||||
|
||||
Ownership of the I{replacement} object is transferred to the node;
|
||||
in other words, this method does not store a copy of I{replacement}
|
||||
at the node, it stores I{replacement} itself.
|
||||
"""
|
||||
|
||||
if not isinstance(replacement, dns.rdataset.Rdataset):
|
||||
raise ValueError('replacement is not an rdataset')
|
||||
self.delete_rdataset(replacement.rdclass, replacement.rdtype,
|
||||
replacement.covers)
|
||||
self.rdatasets.append(replacement)
|
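A usage sketch for the two lookup styles above; it assumes the usual dns.rdataclass.IN and dns.rdatatype constants from the rest of the library:

import dns.node
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()
# find_rdataset() creates the rdataset on demand when create=True ...
rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.A, create=True)
# ... while get_rdataset() returns None instead of raising KeyError.
print(node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.MX))   # None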
lib/dns/opcode.py (new file, 109 lines)
@@ -0,0 +1,109 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Opcodes."""
|
||||
|
||||
import dns.exception
|
||||
|
||||
QUERY = 0
|
||||
IQUERY = 1
|
||||
STATUS = 2
|
||||
NOTIFY = 4
|
||||
UPDATE = 5
|
||||
|
||||
_by_text = {
|
||||
'QUERY': QUERY,
|
||||
'IQUERY': IQUERY,
|
||||
'STATUS': STATUS,
|
||||
'NOTIFY': NOTIFY,
|
||||
'UPDATE': UPDATE
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
|
||||
class UnknownOpcode(dns.exception.DNSException):
|
||||
|
||||
"""An DNS opcode is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into an opcode.
|
||||
|
||||
@param text: the textual opcode
|
||||
@type text: string
|
||||
@raises UnknownOpcode: the opcode is unknown
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
if text.isdigit():
|
||||
value = int(text)
|
||||
if value >= 0 and value <= 15:
|
||||
return value
|
||||
value = _by_text.get(text.upper())
|
||||
if value is None:
|
||||
raise UnknownOpcode
|
||||
return value
|
||||
|
||||
|
||||
def from_flags(flags):
|
||||
"""Extract an opcode from DNS message flags.
|
||||
|
||||
@param flags: int
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return (flags & 0x7800) >> 11
|
||||
|
||||
|
||||
def to_flags(value):
|
||||
"""Convert an opcode to a value suitable for ORing into DNS message
|
||||
flags.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return (value << 11) & 0x7800
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert an opcode to text.
|
||||
|
||||
@param value: the opcode
|
||||
@type value: int
|
||||
@raises UnknownOpcode: the opcode is unknown
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
|
||||
|
||||
|
||||
def is_update(flags):
|
||||
"""True if the opcode in flags is UPDATE.
|
||||
|
||||
@param flags: DNS flags
|
||||
@type flags: int
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
if (from_flags(flags) == UPDATE):
|
||||
return True
|
||||
return False
|
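A round-trip sketch for the conversions above; the opcode occupies bits 11-14 of the flags word:

import dns.opcode

op = dns.opcode.from_text('UPDATE')    # 5
flags = dns.opcode.to_flags(op)        # 5 << 11 == 0x2800
print(dns.opcode.from_flags(flags))    # 5
print(dns.opcode.is_update(flags))     # True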
lib/dns/query.py (new file, 536 lines)
@@ -0,0 +1,536 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Talk to a DNS server."""
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
import errno
|
||||
import select
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
import time
|
||||
|
||||
import dns.exception
|
||||
import dns.inet
|
||||
import dns.name
|
||||
import dns.message
|
||||
import dns.rdataclass
|
||||
import dns.rdatatype
|
||||
from ._compat import long, string_types
|
||||
|
||||
if sys.version_info > (3,):
|
||||
select_error = OSError
|
||||
else:
|
||||
select_error = select.error
|
||||
|
||||
|
||||
class UnexpectedSource(dns.exception.DNSException):
|
||||
|
||||
"""A DNS query response came from an unexpected address or port."""
|
||||
|
||||
|
||||
class BadResponse(dns.exception.FormError):
|
||||
|
||||
"""A DNS query response does not respond to the question asked."""
|
||||
|
||||
|
||||
def _compute_expiration(timeout):
|
||||
if timeout is None:
|
||||
return None
|
||||
else:
|
||||
return time.time() + timeout
|
||||
|
||||
|
||||
def _poll_for(fd, readable, writable, error, timeout):
|
||||
"""Poll polling backend.
|
||||
@param fd: File descriptor
|
||||
@type fd: int
|
||||
@param readable: Whether to wait for readability
|
||||
@type readable: bool
|
||||
@param writable: Whether to wait for writability
|
||||
@type writable: bool
|
||||
@param timeout: The maximum time to wait, in seconds (a relative
timeout, not an absolute expiration time)
@type timeout: float
@return: True on success, False on timeout
|
||||
"""
|
||||
event_mask = 0
|
||||
if readable:
|
||||
event_mask |= select.POLLIN
|
||||
if writable:
|
||||
event_mask |= select.POLLOUT
|
||||
if error:
|
||||
event_mask |= select.POLLERR
|
||||
|
||||
pollable = select.poll()
|
||||
pollable.register(fd, event_mask)
|
||||
|
||||
if timeout:
|
||||
event_list = pollable.poll(long(timeout * 1000))
|
||||
else:
|
||||
event_list = pollable.poll()
|
||||
|
||||
return bool(event_list)
|
||||
|
||||
|
||||
def _select_for(fd, readable, writable, error, timeout):
|
||||
"""Select polling backend.
|
||||
@param fd: File descriptor
|
||||
@type fd: int
|
||||
@param readable: Whether to wait for readability
|
||||
@type readable: bool
|
||||
@param writable: Whether to wait for writability
|
||||
@type writable: bool
|
||||
@param timeout: The maximum time to wait, in seconds (a relative
timeout, not an absolute expiration time)
@type timeout: float
@return: True on success, False on timeout
|
||||
"""
|
||||
rset, wset, xset = [], [], []
|
||||
|
||||
if readable:
|
||||
rset = [fd]
|
||||
if writable:
|
||||
wset = [fd]
|
||||
if error:
|
||||
xset = [fd]
|
||||
|
||||
if timeout is None:
|
||||
(rcount, wcount, xcount) = select.select(rset, wset, xset)
|
||||
else:
|
||||
(rcount, wcount, xcount) = select.select(rset, wset, xset, timeout)
|
||||
|
||||
return bool((rcount or wcount or xcount))
|
||||
|
||||
|
||||
def _wait_for(fd, readable, writable, error, expiration):
|
||||
done = False
|
||||
while not done:
|
||||
if expiration is None:
|
||||
timeout = None
|
||||
else:
|
||||
timeout = expiration - time.time()
|
||||
if timeout <= 0.0:
|
||||
raise dns.exception.Timeout
|
||||
try:
|
||||
if not _polling_backend(fd, readable, writable, error, timeout):
|
||||
raise dns.exception.Timeout
|
||||
except select_error as e:
|
||||
if e.args[0] != errno.EINTR:
|
||||
raise e
|
||||
done = True
|
||||
|
||||
|
||||
def _set_polling_backend(fn):
|
||||
"""
|
||||
Internal API. Do not use.
|
||||
"""
|
||||
global _polling_backend
|
||||
|
||||
_polling_backend = fn
|
||||
|
||||
if hasattr(select, 'poll'):
|
||||
# Prefer poll() on platforms that support it because it has no
|
||||
# limits on the maximum value of a file descriptor (plus it will
|
||||
# be more efficient for high values).
|
||||
_polling_backend = _poll_for
|
||||
else:
|
||||
_polling_backend = _select_for
|
||||
|
||||
|
||||
def _wait_for_readable(s, expiration):
|
||||
_wait_for(s, True, False, True, expiration)
|
||||
|
||||
|
||||
def _wait_for_writable(s, expiration):
|
||||
_wait_for(s, False, True, True, expiration)
|
||||
|
||||
|
||||
def _addresses_equal(af, a1, a2):
|
||||
# Convert the first value of the tuple, which is a textual format
|
||||
# address into binary form, so that we are not confused by different
|
||||
# textual representations of the same address
|
||||
n1 = dns.inet.inet_pton(af, a1[0])
|
||||
n2 = dns.inet.inet_pton(af, a2[0])
|
||||
return n1 == n2 and a1[1:] == a2[1:]
|
||||
|
||||
|
||||
def _destination_and_source(af, where, port, source, source_port):
|
||||
# Apply defaults and compute destination and source tuples
|
||||
# suitable for use in connect(), sendto(), or bind().
|
||||
if af is None:
|
||||
try:
|
||||
af = dns.inet.af_for_address(where)
|
||||
except:
|
||||
af = dns.inet.AF_INET
|
||||
if af == dns.inet.AF_INET:
|
||||
destination = (where, port)
|
||||
if source is not None or source_port != 0:
|
||||
if source is None:
|
||||
source = '0.0.0.0'
|
||||
source = (source, source_port)
|
||||
elif af == dns.inet.AF_INET6:
|
||||
destination = (where, port, 0, 0)
|
||||
if source is not None or source_port != 0:
|
||||
if source is None:
|
||||
source = '::'
|
||||
source = (source, source_port, 0, 0)
|
||||
return (af, destination, source)
|
||||
|
||||
|
||||
def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
|
||||
ignore_unexpected=False, one_rr_per_rrset=False):
|
||||
"""Return the response obtained after sending a query via UDP.
|
||||
|
||||
@param q: the query
|
||||
@type q: dns.message.Message
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param timeout: The number of seconds to wait before the query times out.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@rtype: dns.message.Message object
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param ignore_unexpected: If True, ignore responses from unexpected
|
||||
sources. The default is False.
|
||||
@type ignore_unexpected: bool
|
||||
@param one_rr_per_rrset: Put each RR into its own RRset
|
||||
@type one_rr_per_rrset: bool
|
||||
"""
|
||||
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
s = socket.socket(af, socket.SOCK_DGRAM, 0)
|
||||
begin_time = None
|
||||
try:
|
||||
expiration = _compute_expiration(timeout)
|
||||
s.setblocking(0)
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
_wait_for_writable(s, expiration)
|
||||
begin_time = time.time()
|
||||
s.sendto(wire, destination)
|
||||
while 1:
|
||||
_wait_for_readable(s, expiration)
|
||||
(wire, from_address) = s.recvfrom(65535)
|
||||
if _addresses_equal(af, from_address, destination) or \
|
||||
(dns.inet.is_multicast(where) and
|
||||
from_address[1:] == destination[1:]):
|
||||
break
|
||||
if not ignore_unexpected:
|
||||
raise UnexpectedSource('got a response from '
|
||||
'%s instead of %s' % (from_address,
|
||||
destination))
|
||||
finally:
|
||||
if begin_time is None:
|
||||
response_time = 0
|
||||
else:
|
||||
response_time = time.time() - begin_time
|
||||
s.close()
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
one_rr_per_rrset=one_rr_per_rrset)
|
||||
r.time = response_time
|
||||
if not q.is_response(r):
|
||||
raise BadResponse
|
||||
return r
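# --- Usage sketch (illustrative, not part of the library) -------------------
# A minimal example of udp(): build a query with dns.message.make_query() and
# send it.  The resolver address 8.8.8.8 and the qname example.com. are
# arbitrary placeholders.
import dns.message
import dns.query
import dns.rdatatype

q = dns.message.make_query('example.com.', dns.rdatatype.A)
r = dns.query.udp(q, '8.8.8.8', timeout=2.0)
for rrset in r.answer:
    print(rrset)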
|
||||
|
||||
|
||||
def _net_read(sock, count, expiration):
|
||||
"""Read the specified number of bytes from sock. Keep trying until we
|
||||
either get the desired amount, or we hit EOF.
|
||||
A Timeout exception will be raised if the operation is not completed
|
||||
by the expiration time.
|
||||
"""
|
||||
s = b''
|
||||
while count > 0:
|
||||
_wait_for_readable(sock, expiration)
|
||||
n = sock.recv(count)
|
||||
if n == b'':
|
||||
raise EOFError
|
||||
count = count - len(n)
|
||||
s = s + n
|
||||
return s
|
||||
|
||||
|
||||
def _net_write(sock, data, expiration):
|
||||
"""Write the specified data to the socket.
|
||||
A Timeout exception will be raised if the operation is not completed
|
||||
by the expiration time.
|
||||
"""
|
||||
current = 0
|
||||
l = len(data)
|
||||
while current < l:
|
||||
_wait_for_writable(sock, expiration)
|
||||
current += sock.send(data[current:])
|
||||
|
||||
|
||||
def _connect(s, address):
|
||||
try:
|
||||
s.connect(address)
|
||||
except socket.error:
|
||||
(ty, v) = sys.exc_info()[:2]
|
||||
|
||||
if hasattr(v, 'errno'):
|
||||
v_err = v.errno
|
||||
else:
|
||||
v_err = v[0]
|
||||
if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
|
||||
raise v
|
||||
|
||||
|
||||
def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
|
||||
one_rr_per_rrset=False):
|
||||
"""Return the response obtained after sending a query via TCP.
|
||||
|
||||
@param q: the query
|
||||
@type q: dns.message.Message object
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param timeout: The number of seconds to wait before the query times out.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@rtype: dns.message.Message object
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param one_rr_per_rrset: Put each RR into its own RRset
|
||||
@type one_rr_per_rrset: bool
|
||||
"""
|
||||
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
s = socket.socket(af, socket.SOCK_STREAM, 0)
|
||||
begin_time = None
|
||||
try:
|
||||
expiration = _compute_expiration(timeout)
|
||||
s.setblocking(0)
|
||||
begin_time = time.time()
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
_connect(s, destination)
|
||||
|
||||
l = len(wire)
|
||||
|
||||
# copying the wire into tcpmsg is inefficient, but lets us
|
||||
# avoid writev() or doing a short write that would get pushed
|
||||
# onto the net
|
||||
tcpmsg = struct.pack("!H", l) + wire
|
||||
_net_write(s, tcpmsg, expiration)
|
||||
ldata = _net_read(s, 2, expiration)
|
||||
(l,) = struct.unpack("!H", ldata)
|
||||
wire = _net_read(s, l, expiration)
|
||||
finally:
|
||||
if begin_time is None:
|
||||
response_time = 0
|
||||
else:
|
||||
response_time = time.time() - begin_time
|
||||
s.close()
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
one_rr_per_rrset=one_rr_per_rrset)
|
||||
r.time = response_time
|
||||
if not q.is_response(r):
|
||||
raise BadResponse
|
||||
return r
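# --- Usage sketch (illustrative, not part of the library) -------------------
# tcp() takes the same arguments as udp(); a common pattern (assumed here, not
# required by the library) is to retry over TCP when the UDP reply comes back
# truncated.
import dns.flags
import dns.message
import dns.query

q = dns.message.make_query('example.com.', 'A')
r = dns.query.udp(q, '8.8.8.8', timeout=2.0)
if r.flags & dns.flags.TC:
    r = dns.query.tcp(q, '8.8.8.8', timeout=2.0)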
|
||||
|
||||
|
||||
def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
|
||||
timeout=None, port=53, keyring=None, keyname=None, relativize=True,
|
||||
af=None, lifetime=None, source=None, source_port=0, serial=0,
|
||||
use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
|
||||
"""Return a generator for the responses to a zone transfer.
|
||||
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param zone: The name of the zone to transfer
|
||||
@type zone: dns.name.Name object or string
|
||||
@param rdtype: The type of zone transfer. The default is
|
||||
dns.rdatatype.AXFR.
|
||||
@type rdtype: int or string
|
||||
@param rdclass: The class of the zone transfer. The default is
|
||||
dns.rdataclass.IN.
|
||||
@type rdclass: int or string
|
||||
@param timeout: The number of seconds to wait for each response message.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param keyring: The TSIG keyring to use
|
||||
@type keyring: dict
|
||||
@param keyname: The name of the TSIG key to use
|
||||
@type keyname: dns.name.Name object or string
|
||||
@param relativize: If True, all names in the zone will be relativized to
|
||||
the zone origin. It is essential that the relativize setting matches
|
||||
the one specified to dns.zone.from_xfr().
|
||||
@type relativize: bool
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@param lifetime: The total number of seconds to spend doing the transfer.
|
||||
If None, the default, then there is no limit on the time the transfer may
|
||||
take.
|
||||
@type lifetime: float
|
||||
@rtype: generator of dns.message.Message objects.
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param serial: The SOA serial number to use as the base for an IXFR diff
|
||||
sequence (only meaningful if rdtype == dns.rdatatype.IXFR).
|
||||
@type serial: int
|
||||
@param use_udp: Use UDP (only meaningful for IXFR)
|
||||
@type use_udp: bool
|
||||
@param keyalgorithm: The TSIG algorithm to use; defaults to
|
||||
dns.tsig.default_algorithm
|
||||
@type keyalgorithm: string
|
||||
"""
|
||||
|
||||
if isinstance(zone, string_types):
|
||||
zone = dns.name.from_text(zone)
|
||||
if isinstance(rdtype, string_types):
|
||||
rdtype = dns.rdatatype.from_text(rdtype)
|
||||
q = dns.message.make_query(zone, rdtype, rdclass)
|
||||
if rdtype == dns.rdatatype.IXFR:
|
||||
rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA',
|
||||
'. . %u 0 0 0 0' % serial)
|
||||
q.authority.append(rrset)
|
||||
if keyring is not None:
|
||||
q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
if use_udp:
|
||||
if rdtype != dns.rdatatype.IXFR:
|
||||
raise ValueError('cannot do a UDP AXFR')
|
||||
s = socket.socket(af, socket.SOCK_DGRAM, 0)
|
||||
else:
|
||||
s = socket.socket(af, socket.SOCK_STREAM, 0)
|
||||
s.setblocking(0)
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
expiration = _compute_expiration(lifetime)
|
||||
_connect(s, destination)
|
||||
l = len(wire)
|
||||
if use_udp:
|
||||
_wait_for_writable(s, expiration)
|
||||
s.send(wire)
|
||||
else:
|
||||
tcpmsg = struct.pack("!H", l) + wire
|
||||
_net_write(s, tcpmsg, expiration)
|
||||
done = False
|
||||
delete_mode = True
|
||||
expecting_SOA = False
|
||||
soa_rrset = None
|
||||
if relativize:
|
||||
origin = zone
|
||||
oname = dns.name.empty
|
||||
else:
|
||||
origin = None
|
||||
oname = zone
|
||||
tsig_ctx = None
|
||||
first = True
|
||||
while not done:
|
||||
mexpiration = _compute_expiration(timeout)
|
||||
if mexpiration is None or mexpiration > expiration:
|
||||
mexpiration = expiration
|
||||
if use_udp:
|
||||
_wait_for_readable(s, expiration)
|
||||
(wire, from_address) = s.recvfrom(65535)
|
||||
else:
|
||||
ldata = _net_read(s, 2, mexpiration)
|
||||
(l,) = struct.unpack("!H", ldata)
|
||||
wire = _net_read(s, l, mexpiration)
|
||||
is_ixfr = (rdtype == dns.rdatatype.IXFR)
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
xfr=True, origin=origin, tsig_ctx=tsig_ctx,
|
||||
multi=True, first=first,
|
||||
one_rr_per_rrset=is_ixfr)
|
||||
tsig_ctx = r.tsig_ctx
|
||||
first = False
|
||||
answer_index = 0
|
||||
if soa_rrset is None:
|
||||
if not r.answer or r.answer[0].name != oname:
|
||||
raise dns.exception.FormError(
|
||||
"No answer or RRset not for qname")
|
||||
rrset = r.answer[0]
|
||||
if rrset.rdtype != dns.rdatatype.SOA:
|
||||
raise dns.exception.FormError("first RRset is not an SOA")
|
||||
answer_index = 1
|
||||
soa_rrset = rrset.copy()
|
||||
if rdtype == dns.rdatatype.IXFR:
|
||||
if soa_rrset[0].serial <= serial:
|
||||
#
|
||||
# We're already up-to-date.
|
||||
#
|
||||
done = True
|
||||
else:
|
||||
expecting_SOA = True
|
||||
#
|
||||
# Process SOAs in the answer section (other than the initial
|
||||
# SOA in the first message).
|
||||
#
|
||||
for rrset in r.answer[answer_index:]:
|
||||
if done:
|
||||
raise dns.exception.FormError("answers after final SOA")
|
||||
if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname:
|
||||
if expecting_SOA:
|
||||
if rrset[0].serial != serial:
|
||||
raise dns.exception.FormError(
|
||||
"IXFR base serial mismatch")
|
||||
expecting_SOA = False
|
||||
elif rdtype == dns.rdatatype.IXFR:
|
||||
delete_mode = not delete_mode
|
||||
#
|
||||
# If this SOA RRset is equal to the first we saw then we're
|
||||
# finished. If this is an IXFR we also check that we're seeing
|
||||
# the record in the expected part of the response.
|
||||
#
|
||||
if rrset == soa_rrset and \
|
||||
(rdtype == dns.rdatatype.AXFR or
|
||||
(rdtype == dns.rdatatype.IXFR and delete_mode)):
|
||||
done = True
|
||||
elif expecting_SOA:
|
||||
#
|
||||
# We made an IXFR request and are expecting another
|
||||
# SOA RR, but saw something else, so this must be an
|
||||
# AXFR response.
|
||||
#
|
||||
rdtype = dns.rdatatype.AXFR
|
||||
expecting_SOA = False
|
||||
if done and q.keyring and not r.had_tsig:
|
||||
raise dns.exception.FormError("missing TSIG")
|
||||
yield r
|
||||
s.close()
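# --- Usage sketch (illustrative, not part of the library) -------------------
# xfr() yields response messages one at a time; the usual consumer (assumed
# here) is dns.zone.from_xfr(), which assembles them into a dns.zone.Zone.
# The server 10.0.0.1 and the zone example.com are placeholders.
import dns.query
import dns.zone

zone = dns.zone.from_xfr(dns.query.xfr('10.0.0.1', 'example.com'))
for name, node in sorted(zone.nodes.items()):
    print(node.to_text(name))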
|
125
lib/dns/rcode.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Result Codes."""
|
||||
|
||||
import dns.exception
|
||||
from ._compat import long
|
||||
|
||||
|
||||
NOERROR = 0
|
||||
FORMERR = 1
|
||||
SERVFAIL = 2
|
||||
NXDOMAIN = 3
|
||||
NOTIMP = 4
|
||||
REFUSED = 5
|
||||
YXDOMAIN = 6
|
||||
YXRRSET = 7
|
||||
NXRRSET = 8
|
||||
NOTAUTH = 9
|
||||
NOTZONE = 10
|
||||
BADVERS = 16
|
||||
|
||||
_by_text = {
|
||||
'NOERROR': NOERROR,
|
||||
'FORMERR': FORMERR,
|
||||
'SERVFAIL': SERVFAIL,
|
||||
'NXDOMAIN': NXDOMAIN,
|
||||
'NOTIMP': NOTIMP,
|
||||
'REFUSED': REFUSED,
|
||||
'YXDOMAIN': YXDOMAIN,
|
||||
'YXRRSET': YXRRSET,
|
||||
'NXRRSET': NXRRSET,
|
||||
'NOTAUTH': NOTAUTH,
|
||||
'NOTZONE': NOTZONE,
|
||||
'BADVERS': BADVERS
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
|
||||
class UnknownRcode(dns.exception.DNSException):
|
||||
|
||||
"""A DNS rcode is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into an rcode.
|
||||
|
||||
@param text: the textual rcode
|
||||
@type text: string
|
||||
@raises UnknownRcode: the rcode is unknown
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
if text.isdigit():
|
||||
v = int(text)
|
||||
if v >= 0 and v <= 4095:
|
||||
return v
|
||||
v = _by_text.get(text.upper())
|
||||
if v is None:
|
||||
raise UnknownRcode
|
||||
return v
|
||||
|
||||
|
||||
def from_flags(flags, ednsflags):
|
||||
"""Return the rcode value encoded by flags and ednsflags.
|
||||
|
||||
@param flags: the DNS flags
|
||||
@type flags: int
|
||||
@param ednsflags: the EDNS flags
|
||||
@type ednsflags: int
|
||||
@raises ValueError: rcode is < 0 or > 4095
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0)
|
||||
if value < 0 or value > 4095:
|
||||
raise ValueError('rcode must be >= 0 and <= 4095')
|
||||
return value
|
||||
|
||||
|
||||
def to_flags(value):
|
||||
"""Return a (flags, ednsflags) tuple which encodes the rcode.
|
||||
|
||||
@param value: the rcode
|
||||
@type value: int
|
||||
@raises ValueError: rcode is < 0 or > 4095
|
||||
@rtype: (int, int) tuple
|
||||
"""
|
||||
|
||||
if value < 0 or value > 4095:
|
||||
raise ValueError('rcode must be >= 0 and <= 4095')
|
||||
v = value & 0xf
|
||||
ev = long(value & 0xff0) << 20
|
||||
return (v, ev)
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert rcode into text.
|
||||
|
||||
@param value: the rcode
|
||||
@type value: int
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
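# --- Worked example (illustrative, not part of the library) -----------------
# BADVERS (16) does not fit in the 4-bit header rcode field, so to_flags()
# splits it between the header flags and the EDNS extended rcode, and
# from_flags() reassembles the full value.
import dns.rcode

flags, ednsflags = dns.rcode.to_flags(dns.rcode.BADVERS)   # (0, 0x1000000)
assert dns.rcode.from_flags(flags, ednsflags) == dns.rcode.BADVERS
assert dns.rcode.to_text(dns.rcode.NXDOMAIN) == 'NXDOMAIN'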
|
464
lib/dns/rdata.py
Normal file
@@ -0,0 +1,464 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS rdata.
|
||||
|
||||
@var _rdata_modules: A dictionary mapping a (rdclass, rdtype) tuple to
|
||||
the module which implements that type.
|
||||
@type _rdata_modules: dict
|
||||
@var _module_prefix: The prefix to use when forming modules names. The
|
||||
default is 'dns.rdtypes'. Changing this value will break the library.
|
||||
@type _module_prefix: string
|
||||
@var _hex_chunksize: At most this many octets will be represented in each
|
||||
chunk of hexstring that _hexify() produces before whitespace occurs.
|
||||
@type _hex_chunksize: int"""
|
||||
|
||||
from io import BytesIO
|
||||
import base64
|
||||
import binascii
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.name
|
||||
import dns.rdataclass
|
||||
import dns.rdatatype
|
||||
import dns.tokenizer
|
||||
import dns.wiredata
|
||||
from ._compat import xrange, string_types, text_type
|
||||
|
||||
_hex_chunksize = 32
|
||||
|
||||
|
||||
def _hexify(data, chunksize=_hex_chunksize):
|
||||
"""Convert a binary string into its hex encoding, broken up into chunks
|
||||
of I{chunksize} characters separated by a space.
|
||||
|
||||
@param data: the binary string
|
||||
@type data: string
|
||||
@param chunksize: the chunk size. Default is L{dns.rdata._hex_chunksize}
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
line = binascii.hexlify(data)
|
||||
return b' '.join([line[i:i + chunksize]
|
||||
for i
|
||||
in range(0, len(line), chunksize)]).decode()
|
||||
|
||||
_base64_chunksize = 32
|
||||
|
||||
|
||||
def _base64ify(data, chunksize=_base64_chunksize):
|
||||
"""Convert a binary string into its base64 encoding, broken up into chunks
|
||||
of I{chunksize} characters separated by a space.
|
||||
|
||||
@param data: the binary string
|
||||
@type data: string
|
||||
@param chunksize: the chunk size. Default is
|
||||
L{dns.rdata._base64_chunksize}
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
line = base64.b64encode(data)
|
||||
return b' '.join([line[i:i + chunksize]
|
||||
for i
|
||||
in range(0, len(line), chunksize)]).decode()
|
||||
|
||||
__escaped = {
|
||||
'"': True,
|
||||
'\\': True,
|
||||
}
|
||||
|
||||
|
||||
def _escapify(qstring):
|
||||
"""Escape the characters in a quoted string which need it.
|
||||
|
||||
@param qstring: the string
|
||||
@type qstring: string
|
||||
@returns: the escaped string
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if isinstance(qstring, text_type):
|
||||
qstring = qstring.encode()
|
||||
if not isinstance(qstring, bytearray):
|
||||
qstring = bytearray(qstring)
|
||||
|
||||
text = ''
|
||||
for c in qstring:
|
||||
packed = struct.pack('!B', c).decode()
|
||||
if packed in __escaped:
|
||||
text += '\\' + packed
|
||||
elif c >= 0x20 and c < 0x7F:
|
||||
text += packed
|
||||
else:
|
||||
text += '\\%03d' % c
|
||||
return text
|
||||
|
||||
|
||||
def _truncate_bitmap(what):
|
||||
"""Determine the index of greatest byte that isn't all zeros, and
|
||||
return the bitmap that contains all the bytes up to and including that index.
|
||||
|
||||
@param what: a string of octets representing a bitmap.
|
||||
@type what: string
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
for i in xrange(len(what) - 1, -1, -1):
|
||||
if what[i] != 0:
|
||||
break
|
||||
return what[0: i + 1]
|
||||
|
||||
|
||||
class Rdata(object):
|
||||
|
||||
"""Base class for all DNS rdata types.
|
||||
"""
|
||||
|
||||
__slots__ = ['rdclass', 'rdtype']
|
||||
|
||||
def __init__(self, rdclass, rdtype):
|
||||
"""Initialize an rdata.
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
"""
|
||||
|
||||
self.rdclass = rdclass
|
||||
self.rdtype = rdtype
|
||||
|
||||
def covers(self):
|
||||
"""DNS SIG/RRSIG rdatas apply to a specific type; this type is
|
||||
returned by the covers() function. If the rdata type is not
|
||||
SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
|
||||
creating rdatasets, allowing the rdataset to contain only RRSIGs
|
||||
of a particular type, e.g. RRSIG(NS).
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return dns.rdatatype.NONE
|
||||
|
||||
def extended_rdatatype(self):
|
||||
"""Return a 32-bit type value, the least significant 16 bits of
|
||||
which are the ordinary DNS type, and the upper 16 bits of which are
|
||||
the "covered" type, if any.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return self.covers() << 16 | self.rdtype
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
"""Convert an rdata to text format.
|
||||
@rtype: string
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
"""Convert an rdata to wire format.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def to_digestable(self, origin=None):
|
||||
"""Convert rdata to a format suitable for digesting in hashes. This
|
||||
is also the DNSSEC canonical form."""
|
||||
f = BytesIO()
|
||||
self.to_wire(f, None, origin)
|
||||
return f.getvalue()
|
||||
|
||||
def validate(self):
|
||||
"""Check that the current contents of the rdata's fields are
|
||||
valid. If you change an rdata by assigning to its fields,
|
||||
it is a good idea to call validate() when you are done making
|
||||
changes.
|
||||
"""
|
||||
dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
|
||||
|
||||
def __repr__(self):
|
||||
covers = self.covers()
|
||||
if covers == dns.rdatatype.NONE:
|
||||
ctext = ''
|
||||
else:
|
||||
ctext = '(' + dns.rdatatype.to_text(covers) + ')'
|
||||
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
|
||||
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
|
||||
str(self) + '>'
|
||||
|
||||
def __str__(self):
|
||||
return self.to_text()
|
||||
|
||||
def _cmp(self, other):
|
||||
"""Compare an rdata with another rdata of the same rdtype and
|
||||
rdclass. Return < 0 if self < other in the DNSSEC ordering,
|
||||
0 if self == other, and > 0 if self > other.
|
||||
"""
|
||||
our = self.to_digestable(dns.name.root)
|
||||
their = other.to_digestable(dns.name.root)
|
||||
if our == their:
|
||||
return 0
|
||||
if our > their:
|
||||
return 1
|
||||
|
||||
return -1
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Rdata):
|
||||
return False
|
||||
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return False
|
||||
return self._cmp(other) == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
if not isinstance(other, Rdata):
|
||||
return True
|
||||
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return True
|
||||
return self._cmp(other) != 0
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
|
||||
return NotImplemented
|
||||
return self._cmp(other) < 0
|
||||
|
||||
def __le__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) <= 0
|
||||
|
||||
def __ge__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) >= 0
|
||||
|
||||
def __gt__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) > 0
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.to_digestable(dns.name.root))
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
"""Build an rdata object from text format.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param tok: The tokenizer
|
||||
@type tok: dns.tokenizer.Tokenizer
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@param relativize: should names be relativized?
|
||||
@type relativize: bool
|
||||
@rtype: dns.rdata.Rdata instance
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
"""Build an rdata object from wire format
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param rdlen: The length of the wire-format rdata
|
||||
@type rdlen: int
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@rtype: dns.rdata.Rdata instance
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def choose_relativity(self, origin=None, relativize=True):
|
||||
"""Convert any domain names in the rdata to the specified
|
||||
relativization.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class GenericRdata(Rdata):
|
||||
|
||||
"""Generate Rdata Class
|
||||
|
||||
This class is used for rdata types for which we have no better
|
||||
implementation. It implements the DNS "unknown RRs" scheme.
|
||||
"""
|
||||
|
||||
__slots__ = ['data']
|
||||
|
||||
def __init__(self, rdclass, rdtype, data):
|
||||
super(GenericRdata, self).__init__(rdclass, rdtype)
|
||||
self.data = data
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
return r'\# %d ' % len(self.data) + _hexify(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
token = tok.get()
|
||||
if not token.is_identifier() or token.value != '\#':
|
||||
raise dns.exception.SyntaxError(
|
||||
r'generic rdata does not start with \#')
|
||||
length = tok.get_int()
|
||||
chunks = []
|
||||
while 1:
|
||||
token = tok.get()
|
||||
if token.is_eol_or_eof():
|
||||
break
|
||||
chunks.append(token.value.encode())
|
||||
hex = b''.join(chunks)
|
||||
data = binascii.unhexlify(hex)
|
||||
if len(data) != length:
|
||||
raise dns.exception.SyntaxError(
|
||||
'generic rdata hex data has wrong length')
|
||||
return cls(rdclass, rdtype, data)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
file.write(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
return cls(rdclass, rdtype, wire[current: current + rdlen])
|
||||
|
||||
_rdata_modules = {}
|
||||
_module_prefix = 'dns.rdtypes'
|
||||
|
||||
|
||||
def get_rdata_class(rdclass, rdtype):
|
||||
|
||||
def import_module(name):
|
||||
mod = __import__(name)
|
||||
components = name.split('.')
|
||||
for comp in components[1:]:
|
||||
mod = getattr(mod, comp)
|
||||
return mod
|
||||
|
||||
mod = _rdata_modules.get((rdclass, rdtype))
|
||||
rdclass_text = dns.rdataclass.to_text(rdclass)
|
||||
rdtype_text = dns.rdatatype.to_text(rdtype)
|
||||
rdtype_text = rdtype_text.replace('-', '_')
|
||||
if not mod:
|
||||
mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype))
|
||||
if not mod:
|
||||
try:
|
||||
mod = import_module('.'.join([_module_prefix,
|
||||
rdclass_text, rdtype_text]))
|
||||
_rdata_modules[(rdclass, rdtype)] = mod
|
||||
except ImportError:
|
||||
try:
|
||||
mod = import_module('.'.join([_module_prefix,
|
||||
'ANY', rdtype_text]))
|
||||
_rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod
|
||||
except ImportError:
|
||||
mod = None
|
||||
if mod:
|
||||
cls = getattr(mod, rdtype_text)
|
||||
else:
|
||||
cls = GenericRdata
|
||||
return cls
|
||||
|
||||
|
||||
def from_text(rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
"""Build an rdata object from text format.
|
||||
|
||||
This function attempts to dynamically load a class which
|
||||
implements the specified rdata class and type. If there is no
|
||||
class-and-type-specific implementation, the GenericRdata class
|
||||
is used.
|
||||
|
||||
Once a class is chosen, its from_text() class method is called
|
||||
with the parameters to this function.
|
||||
|
||||
If I{tok} is a string, then a tokenizer is created and the string
|
||||
is used as its input.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param tok: The tokenizer or input text
|
||||
@type tok: dns.tokenizer.Tokenizer or string
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@param relativize: Should names be relativized?
|
||||
@type relativize: bool
|
||||
@rtype: dns.rdata.Rdata instance"""
|
||||
|
||||
if isinstance(tok, string_types):
|
||||
tok = dns.tokenizer.Tokenizer(tok)
|
||||
cls = get_rdata_class(rdclass, rdtype)
|
||||
if cls != GenericRdata:
|
||||
# peek at first token
|
||||
token = tok.get()
|
||||
tok.unget(token)
|
||||
if token.is_identifier() and \
|
||||
token.value == r'\#':
|
||||
#
|
||||
# Known type using the generic syntax. Extract the
|
||||
# wire form from the generic syntax, and then run
|
||||
# from_wire on it.
|
||||
#
|
||||
rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin,
|
||||
relativize)
|
||||
return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data),
|
||||
origin)
|
||||
return cls.from_text(rdclass, rdtype, tok, origin, relativize)
|
||||
|
||||
|
||||
def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
"""Build an rdata object from wire format
|
||||
|
||||
This function attempts to dynamically load a class which
|
||||
implements the specified rdata class and type. If there is no
|
||||
class-and-type-specific implementation, the GenericRdata class
|
||||
is used.
|
||||
|
||||
Once a class is chosen, its from_wire() class method is called
|
||||
with the parameters to this function.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param rdlen: The length of the wire-format rdata
|
||||
@type rdlen: int
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@rtype: dns.rdata.Rdata instance"""
|
||||
|
||||
wire = dns.wiredata.maybe_wrap(wire)
|
||||
cls = get_rdata_class(rdclass, rdtype)
|
||||
return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin)
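# --- Usage sketch (illustrative, not part of the library) -------------------
# from_text() dispatches to a type-specific implementation when one is
# available; the ordinary syntax and the generic "unknown RR" (RFC 3597)
# syntax below produce equal rdatas.
import dns.rdata
import dns.rdataclass
import dns.rdatatype

a1 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '10.0.0.1')
a2 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, r'\# 4 0a000001')
assert a1 == a2 and a1.to_text() == '10.0.0.1'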
|
118
lib/dns/rdataclass.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Rdata Classes.
|
||||
|
||||
@var _by_text: The rdata class textual name to value mapping
|
||||
@type _by_text: dict
|
||||
@var _by_value: The rdata class value to textual name mapping
|
||||
@type _by_value: dict
|
||||
@var _metaclasses: If an rdataclass is a metaclass, there will be a mapping
|
||||
whose key is the rdataclass value and whose value is True in this dictionary.
|
||||
@type _metaclasses: dict"""
|
||||
|
||||
import re
|
||||
|
||||
import dns.exception
|
||||
|
||||
RESERVED0 = 0
|
||||
IN = 1
|
||||
CH = 3
|
||||
HS = 4
|
||||
NONE = 254
|
||||
ANY = 255
|
||||
|
||||
_by_text = {
|
||||
'RESERVED0': RESERVED0,
|
||||
'IN': IN,
|
||||
'CH': CH,
|
||||
'HS': HS,
|
||||
'NONE': NONE,
|
||||
'ANY': ANY
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
# Now that we've built the inverse map, we can add class aliases to
|
||||
# the _by_text mapping.
|
||||
|
||||
_by_text.update({
|
||||
'INTERNET': IN,
|
||||
'CHAOS': CH,
|
||||
'HESIOD': HS
|
||||
})
|
||||
|
||||
_metaclasses = {
|
||||
NONE: True,
|
||||
ANY: True
|
||||
}
|
||||
|
||||
_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I)
|
||||
|
||||
|
||||
class UnknownRdataclass(dns.exception.DNSException):
|
||||
|
||||
"""A DNS class is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into a DNS rdata class value.
|
||||
@param text: the text
|
||||
@type text: string
|
||||
@rtype: int
|
||||
@raises dns.rdataclass.UnknownRdataclass: the class is unknown
|
||||
@raises ValueError: the rdata class value is not >= 0 and <= 65535
|
||||
"""
|
||||
|
||||
value = _by_text.get(text.upper())
|
||||
if value is None:
|
||||
match = _unknown_class_pattern.match(text)
|
||||
if match is None:
|
||||
raise UnknownRdataclass
|
||||
value = int(match.group(1))
|
||||
if value < 0 or value > 65535:
|
||||
raise ValueError("class must be between >= 0 and <= 65535")
|
||||
return value
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert a DNS rdata class to text.
|
||||
@param value: the rdata class value
|
||||
@type value: int
|
||||
@rtype: string
|
||||
@raises ValueError: the rdata class value is not >= 0 and <= 65535
|
||||
"""
|
||||
|
||||
if value < 0 or value > 65535:
|
||||
raise ValueError("class must be between >= 0 and <= 65535")
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = 'CLASS' + repr(value)
|
||||
return text
|
||||
|
||||
|
||||
def is_metaclass(rdclass):
|
||||
"""True if the class is a metaclass.
|
||||
@param rdclass: the rdata class
|
||||
@type rdclass: int
|
||||
@rtype: bool"""
|
||||
|
||||
if rdclass in _metaclasses:
|
||||
return True
|
||||
return False
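# --- Usage sketch (illustrative, not part of the library) -------------------
# from_text() accepts the mnemonics, their aliases, and the generic CLASS###
# form; to_text() reverses the mapping.
import dns.rdataclass

assert dns.rdataclass.from_text('INTERNET') == dns.rdataclass.IN
assert dns.rdataclass.from_text('CLASS1') == dns.rdataclass.IN
assert dns.rdataclass.to_text(dns.rdataclass.HS) == 'HS'
assert dns.rdataclass.is_metaclass(dns.rdataclass.ANY)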
|
338
lib/dns/rdataset.py
Normal file
@@ -0,0 +1,338 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)"""
|
||||
|
||||
import random
|
||||
from io import StringIO
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdatatype
|
||||
import dns.rdataclass
|
||||
import dns.rdata
|
||||
import dns.set
|
||||
from ._compat import string_types
|
||||
|
||||
# define SimpleSet here for backwards compatibility
|
||||
SimpleSet = dns.set.Set
|
||||
|
||||
|
||||
class DifferingCovers(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to add a DNS SIG/RRSIG whose covered type
|
||||
is not the same as that of the other rdatas in the rdataset."""
|
||||
|
||||
|
||||
class IncompatibleTypes(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to add DNS RR data of an incompatible type."""
|
||||
|
||||
|
||||
class Rdataset(dns.set.Set):
|
||||
|
||||
"""A DNS rdataset.
|
||||
|
||||
@ivar rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@ivar rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@ivar covers: The covered type. Usually this value is
|
||||
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
|
||||
dns.rdatatype.RRSIG, then the covers value will be the rdata
|
||||
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
|
||||
types as if they were a family of
|
||||
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
|
||||
easier to work with than if RRSIGs covering different rdata
|
||||
types were aggregated into a single RRSIG rdataset.
|
||||
@type covers: int
|
||||
@ivar ttl: The DNS TTL (Time To Live) value
|
||||
@type ttl: int
|
||||
"""
|
||||
|
||||
__slots__ = ['rdclass', 'rdtype', 'covers', 'ttl']
|
||||
|
||||
def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
|
||||
"""Create a new rdataset of the specified class and type.
|
||||
|
||||
@see: the description of the class instance variables for the
|
||||
meaning of I{rdclass} and I{rdtype}"""
|
||||
|
||||
super(Rdataset, self).__init__()
|
||||
self.rdclass = rdclass
|
||||
self.rdtype = rdtype
|
||||
self.covers = covers
|
||||
self.ttl = 0
|
||||
|
||||
def _clone(self):
|
||||
obj = super(Rdataset, self)._clone()
|
||||
obj.rdclass = self.rdclass
|
||||
obj.rdtype = self.rdtype
|
||||
obj.covers = self.covers
|
||||
obj.ttl = self.ttl
|
||||
return obj
|
||||
|
||||
def update_ttl(self, ttl):
|
||||
"""Set the TTL of the rdataset to be the lesser of the set's current
|
||||
TTL or the specified TTL. If the set contains no rdatas, set the TTL
|
||||
to the specified TTL.
|
||||
@param ttl: The TTL
|
||||
@type ttl: int"""
|
||||
|
||||
if len(self) == 0:
|
||||
self.ttl = ttl
|
||||
elif ttl < self.ttl:
|
||||
self.ttl = ttl
|
||||
|
||||
def add(self, rd, ttl=None):
|
||||
"""Add the specified rdata to the rdataset.
|
||||
|
||||
If the optional I{ttl} parameter is supplied, then
|
||||
self.update_ttl(ttl) will be called prior to adding the rdata.
|
||||
|
||||
@param rd: The rdata
|
||||
@type rd: dns.rdata.Rdata object
|
||||
@param ttl: The TTL
|
||||
@type ttl: int"""
|
||||
|
||||
#
|
||||
# If we're adding a signature, do some special handling to
|
||||
# check that the signature covers the same type as the
|
||||
# other rdatas in this rdataset. If this is the first rdata
|
||||
# in the set, initialize the covers field.
|
||||
#
|
||||
if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
|
||||
raise IncompatibleTypes
|
||||
if ttl is not None:
|
||||
self.update_ttl(ttl)
|
||||
if self.rdtype == dns.rdatatype.RRSIG or \
|
||||
self.rdtype == dns.rdatatype.SIG:
|
||||
covers = rd.covers()
|
||||
if len(self) == 0 and self.covers == dns.rdatatype.NONE:
|
||||
self.covers = covers
|
||||
elif self.covers != covers:
|
||||
raise DifferingCovers
|
||||
if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
|
||||
self.clear()
|
||||
super(Rdataset, self).add(rd)
|
||||
|
||||
def union_update(self, other):
|
||||
self.update_ttl(other.ttl)
|
||||
super(Rdataset, self).union_update(other)
|
||||
|
||||
def intersection_update(self, other):
|
||||
self.update_ttl(other.ttl)
|
||||
super(Rdataset, self).intersection_update(other)
|
||||
|
||||
def update(self, other):
|
||||
"""Add all rdatas in other to self.
|
||||
|
||||
@param other: The rdataset from which to update
|
||||
@type other: dns.rdataset.Rdataset object"""
|
||||
|
||||
self.update_ttl(other.ttl)
|
||||
super(Rdataset, self).update(other)
|
||||
|
||||
def __repr__(self):
|
||||
if self.covers == 0:
|
||||
ctext = ''
|
||||
else:
|
||||
ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
|
||||
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
|
||||
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset>'
|
||||
|
||||
def __str__(self):
|
||||
return self.to_text()
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Two rdatasets are equal if they have the same class, type, and
|
||||
covers, and contain the same rdata.
|
||||
@rtype: bool"""
|
||||
|
||||
if not isinstance(other, Rdataset):
|
||||
return False
|
||||
if self.rdclass != other.rdclass or \
|
||||
self.rdtype != other.rdtype or \
|
||||
self.covers != other.covers:
|
||||
return False
|
||||
return super(Rdataset, self).__eq__(other)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def to_text(self, name=None, origin=None, relativize=True,
|
||||
override_rdclass=None, **kw):
|
||||
"""Convert the rdataset into DNS master file format.
|
||||
|
||||
@see: L{dns.name.Name.choose_relativity} for more information
|
||||
on how I{origin} and I{relativize} determine the way names
|
||||
are emitted.
|
||||
|
||||
Any additional keyword arguments are passed on to the rdata
|
||||
to_text() method.
|
||||
|
||||
@param name: If name is not None, emit RRs with I{name} as
|
||||
the owner name.
|
||||
@type name: dns.name.Name object
|
||||
@param origin: The origin for relative names, or None.
|
||||
@type origin: dns.name.Name object
|
||||
@param relativize: True if names should be relativized
|
||||
@type relativize: bool"""
|
||||
if name is not None:
|
||||
name = name.choose_relativity(origin, relativize)
|
||||
ntext = str(name)
|
||||
pad = ' '
|
||||
else:
|
||||
ntext = ''
|
||||
pad = ''
|
||||
s = StringIO()
|
||||
if override_rdclass is not None:
|
||||
rdclass = override_rdclass
|
||||
else:
|
||||
rdclass = self.rdclass
|
||||
if len(self) == 0:
|
||||
#
|
||||
# Empty rdatasets are used for the question section, and in
|
||||
# some dynamic updates, so we don't need to print out the TTL
|
||||
# (which is meaningless anyway).
|
||||
#
|
||||
s.write(u'%s%s%s %s\n' % (ntext, pad,
|
||||
dns.rdataclass.to_text(rdclass),
|
||||
dns.rdatatype.to_text(self.rdtype)))
|
||||
else:
|
||||
for rd in self:
|
||||
s.write(u'%s%s%d %s %s %s\n' %
|
||||
(ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass),
|
||||
dns.rdatatype.to_text(self.rdtype),
|
||||
rd.to_text(origin=origin, relativize=relativize,
|
||||
**kw)))
|
||||
#
|
||||
# We strip off the final \n for the caller's convenience in printing
|
||||
#
|
||||
return s.getvalue()[:-1]
|
||||
|
||||
def to_wire(self, name, file, compress=None, origin=None,
|
||||
override_rdclass=None, want_shuffle=True):
|
||||
"""Convert the rdataset to wire format.
|
||||
|
||||
@param name: The owner name of the RRset that will be emitted
|
||||
@type name: dns.name.Name object
|
||||
@param file: The file to which the wire format data will be appended
|
||||
@type file: file
|
||||
@param compress: The compression table to use; the default is None.
|
||||
@type compress: dict
|
||||
@param origin: The origin to be appended to any relative names when
|
||||
they are emitted. The default is None.
|
||||
@returns: the number of records emitted
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
if override_rdclass is not None:
|
||||
rdclass = override_rdclass
|
||||
want_shuffle = False
|
||||
else:
|
||||
rdclass = self.rdclass
|
||||
file.seek(0, 2)
|
||||
if len(self) == 0:
|
||||
name.to_wire(file, compress, origin)
|
||||
stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
|
||||
file.write(stuff)
|
||||
return 1
|
||||
else:
|
||||
if want_shuffle:
|
||||
l = list(self)
|
||||
random.shuffle(l)
|
||||
else:
|
||||
l = self
|
||||
for rd in l:
|
||||
name.to_wire(file, compress, origin)
|
||||
stuff = struct.pack("!HHIH", self.rdtype, rdclass,
|
||||
self.ttl, 0)
|
||||
file.write(stuff)
|
||||
start = file.tell()
|
||||
rd.to_wire(file, compress, origin)
|
||||
end = file.tell()
|
||||
assert end - start < 65536
|
||||
file.seek(start - 2)
|
||||
stuff = struct.pack("!H", end - start)
|
||||
file.write(stuff)
|
||||
file.seek(0, 2)
|
||||
return len(self)
|
||||
|
||||
def match(self, rdclass, rdtype, covers):
|
||||
"""Returns True if this rdataset matches the specified class, type,
|
||||
and covers"""
|
||||
if self.rdclass == rdclass and \
|
||||
self.rdtype == rdtype and \
|
||||
self.covers == covers:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def from_text_list(rdclass, rdtype, ttl, text_rdatas):
|
||||
"""Create an rdataset with the specified class, type, and TTL, and with
|
||||
the specified list of rdatas in text format.
|
||||
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
if isinstance(rdclass, string_types):
|
||||
rdclass = dns.rdataclass.from_text(rdclass)
|
||||
if isinstance(rdtype, string_types):
|
||||
rdtype = dns.rdatatype.from_text(rdtype)
|
||||
r = Rdataset(rdclass, rdtype)
|
||||
r.update_ttl(ttl)
|
||||
for t in text_rdatas:
|
||||
rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
|
||||
r.add(rd)
|
||||
return r
|
||||
|
||||
|
||||
def from_text(rdclass, rdtype, ttl, *text_rdatas):
|
||||
"""Create an rdataset with the specified class, type, and TTL, and with
|
||||
the specified rdatas in text format.
|
||||
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
return from_text_list(rdclass, rdtype, ttl, text_rdatas)
|
||||
|
||||
|
||||
def from_rdata_list(ttl, rdatas):
|
||||
"""Create an rdataset with the specified TTL, and with
|
||||
the specified list of rdata objects.
|
||||
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
if len(rdatas) == 0:
|
||||
raise ValueError("rdata list must not be empty")
|
||||
r = None
|
||||
for rd in rdatas:
|
||||
if r is None:
|
||||
r = Rdataset(rd.rdclass, rd.rdtype)
|
||||
r.update_ttl(ttl)
|
||||
r.add(rd)
|
||||
return r
|
||||
|
||||
|
||||
def from_rdata(ttl, *rdatas):
|
||||
"""Create an rdataset with the specified TTL, and with
|
||||
the specified rdata objects.
|
||||
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
return from_rdata_list(ttl, rdatas)
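# --- Usage sketch (illustrative, not part of the library) -------------------
# The helpers above build rdatasets directly from text; update_ttl() keeps the
# smallest TTL seen, so adding a record with a lower TTL lowers the set's TTL.
import dns.rdata
import dns.rdataset

rds = dns.rdataset.from_text('IN', 'A', 300, '10.0.0.1', '10.0.0.2')
rds.add(dns.rdata.from_text(rds.rdclass, rds.rdtype, '10.0.0.3'), ttl=60)
assert rds.ttl == 60 and len(rds) == 3
print(rds.to_text())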
|
253
lib/dns/rdatatype.py
Normal file
@@ -0,0 +1,253 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Rdata Types.
|
||||
|
||||
@var _by_text: The rdata type textual name to value mapping
|
||||
@type _by_text: dict
|
||||
@var _by_value: The rdata type value to textual name mapping
|
||||
@type _by_value: dict
|
||||
@var _metatypes: If an rdatatype is a metatype, there will be a mapping
|
||||
whose key is the rdatatype value and whose value is True in this dictionary.
|
||||
@type _metatypes: dict
|
||||
@var _singletons: If an rdatatype is a singleton, there will be a mapping
|
||||
whose key is the rdatatype value and whose value is True in this dictionary.
|
||||
@type _singletons: dict"""
|
||||
|
||||
import re
|
||||
|
||||
import dns.exception
|
||||
|
||||
NONE = 0
|
||||
A = 1
|
||||
NS = 2
|
||||
MD = 3
|
||||
MF = 4
|
||||
CNAME = 5
|
||||
SOA = 6
|
||||
MB = 7
|
||||
MG = 8
|
||||
MR = 9
|
||||
NULL = 10
|
||||
WKS = 11
|
||||
PTR = 12
|
||||
HINFO = 13
|
||||
MINFO = 14
|
||||
MX = 15
|
||||
TXT = 16
|
||||
RP = 17
|
||||
AFSDB = 18
|
||||
X25 = 19
|
||||
ISDN = 20
|
||||
RT = 21
|
||||
NSAP = 22
|
||||
NSAP_PTR = 23
|
||||
SIG = 24
|
||||
KEY = 25
|
||||
PX = 26
|
||||
GPOS = 27
|
||||
AAAA = 28
|
||||
LOC = 29
|
||||
NXT = 30
|
||||
SRV = 33
|
||||
NAPTR = 35
|
||||
KX = 36
|
||||
CERT = 37
|
||||
A6 = 38
|
||||
DNAME = 39
|
||||
OPT = 41
|
||||
APL = 42
|
||||
DS = 43
|
||||
SSHFP = 44
|
||||
IPSECKEY = 45
|
||||
RRSIG = 46
|
||||
NSEC = 47
|
||||
DNSKEY = 48
|
||||
DHCID = 49
|
||||
NSEC3 = 50
|
||||
NSEC3PARAM = 51
|
||||
TLSA = 52
|
||||
HIP = 55
|
||||
CDS = 59
|
||||
CDNSKEY = 60
|
||||
CSYNC = 62
|
||||
SPF = 99
|
||||
UNSPEC = 103
|
||||
EUI48 = 108
|
||||
EUI64 = 109
|
||||
TKEY = 249
|
||||
TSIG = 250
|
||||
IXFR = 251
|
||||
AXFR = 252
|
||||
MAILB = 253
|
||||
MAILA = 254
|
||||
ANY = 255
|
||||
URI = 256
|
||||
CAA = 257
|
||||
TA = 32768
|
||||
DLV = 32769
|
||||
|
||||
_by_text = {
|
||||
'NONE': NONE,
|
||||
'A': A,
|
||||
'NS': NS,
|
||||
'MD': MD,
|
||||
'MF': MF,
|
||||
'CNAME': CNAME,
|
||||
'SOA': SOA,
|
||||
'MB': MB,
|
||||
'MG': MG,
|
||||
'MR': MR,
|
||||
'NULL': NULL,
|
||||
'WKS': WKS,
|
||||
'PTR': PTR,
|
||||
'HINFO': HINFO,
|
||||
'MINFO': MINFO,
|
||||
'MX': MX,
|
||||
'TXT': TXT,
|
||||
'RP': RP,
|
||||
'AFSDB': AFSDB,
|
||||
'X25': X25,
|
||||
'ISDN': ISDN,
|
||||
'RT': RT,
|
||||
'NSAP': NSAP,
|
||||
'NSAP-PTR': NSAP_PTR,
|
||||
'SIG': SIG,
|
||||
'KEY': KEY,
|
||||
'PX': PX,
|
||||
'GPOS': GPOS,
|
||||
'AAAA': AAAA,
|
||||
'LOC': LOC,
|
||||
'NXT': NXT,
|
||||
'SRV': SRV,
|
||||
'NAPTR': NAPTR,
|
||||
'KX': KX,
|
||||
'CERT': CERT,
|
||||
'A6': A6,
|
||||
'DNAME': DNAME,
|
||||
'OPT': OPT,
|
||||
'APL': APL,
|
||||
'DS': DS,
|
||||
'SSHFP': SSHFP,
|
||||
'IPSECKEY': IPSECKEY,
|
||||
'RRSIG': RRSIG,
|
||||
'NSEC': NSEC,
|
||||
'DNSKEY': DNSKEY,
|
||||
'DHCID': DHCID,
|
||||
'NSEC3': NSEC3,
|
||||
'NSEC3PARAM': NSEC3PARAM,
|
||||
'TLSA': TLSA,
|
||||
'HIP': HIP,
|
||||
'CDS': CDS,
|
||||
'CDNSKEY': CDNSKEY,
|
||||
'CSYNC': CSYNC,
|
||||
'SPF': SPF,
|
||||
'UNSPEC': UNSPEC,
|
||||
'EUI48': EUI48,
|
||||
'EUI64': EUI64,
|
||||
'TKEY': TKEY,
|
||||
'TSIG': TSIG,
|
||||
'IXFR': IXFR,
|
||||
'AXFR': AXFR,
|
||||
'MAILB': MAILB,
|
||||
'MAILA': MAILA,
|
||||
'ANY': ANY,
|
||||
'URI': URI,
|
||||
'CAA': CAA,
|
||||
'TA': TA,
|
||||
'DLV': DLV,
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
|
||||
_metatypes = {
|
||||
OPT: True
|
||||
}
|
||||
|
||||
_singletons = {
|
||||
SOA: True,
|
||||
NXT: True,
|
||||
DNAME: True,
|
||||
NSEC: True,
|
||||
# CNAME is technically a singleton, but we allow multiple CNAMEs.
|
||||
}
|
||||
|
||||
_unknown_type_pattern = re.compile('TYPE([0-9]+)$', re.I)
|
||||
|
||||
|
||||
class UnknownRdatatype(dns.exception.DNSException):
|
||||
|
||||
"""DNS resource record type is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into a DNS rdata type value.
|
||||
@param text: the text
|
||||
@type text: string
|
||||
@raises dns.rdatatype.UnknownRdatatype: the type is unknown
|
||||
@raises ValueError: the rdata type value is not >= 0 and <= 65535
|
||||
@rtype: int"""
|
||||
|
||||
value = _by_text.get(text.upper())
|
||||
if value is None:
|
||||
match = _unknown_type_pattern.match(text)
|
||||
if match is None:
|
||||
raise UnknownRdatatype
|
||||
value = int(match.group(1))
|
||||
if value < 0 or value > 65535:
|
||||
raise ValueError("type must be between >= 0 and <= 65535")
|
||||
return value
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert a DNS rdata type to text.
|
||||
@param value: the rdata type value
|
||||
@type value: int
|
||||
@raises ValueError: the rdata type value is not >= 0 and <= 65535
|
||||
@rtype: string"""
|
||||
|
||||
if value < 0 or value > 65535:
|
||||
raise ValueError("type must be between >= 0 and <= 65535")
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = 'TYPE' + repr(value)
|
||||
return text
|
||||
|
||||
|
||||
def is_metatype(rdtype):
|
||||
"""True if the type is a metatype.
|
||||
@param rdtype: the type
|
||||
@type rdtype: int
|
||||
@rtype: bool"""
|
||||
|
||||
if rdtype >= TKEY and rdtype <= ANY or rdtype in _metatypes:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def is_singleton(rdtype):
|
||||
"""True if the type is a singleton.
|
||||
@param rdtype: the type
|
||||
@type rdtype: int
|
||||
@rtype: bool"""
|
||||
|
||||
if rdtype in _singletons:
|
||||
return True
|
||||
return False
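# --- Usage sketch (illustrative, not part of the library) -------------------
# The generic TYPE### form round-trips through from_text()/to_text(), and the
# predicates classify well-known types.
import dns.rdatatype

assert dns.rdatatype.from_text('TYPE65280') == 65280
assert dns.rdatatype.to_text(dns.rdatatype.AAAA) == 'AAAA'
assert dns.rdatatype.is_metatype(dns.rdatatype.AXFR)
assert dns.rdatatype.is_singleton(dns.rdatatype.SOA)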
|
53
lib/dns/rdtypes/ANY/AFSDB.py
Normal file
@@ -0,0 +1,53 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.mxbase
|
||||
|
||||
|
||||
class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX):
|
||||
|
||||
"""AFSDB record
|
||||
|
||||
@ivar subtype: the subtype value
|
||||
@type subtype: int
|
||||
@ivar hostname: the hostname name
|
||||
@type hostname: dns.name.Name object"""
|
||||
|
||||
# Use the property mechanism to make "subtype" an alias for the
|
||||
# "preference" attribute, and "hostname" an alias for the "exchange"
|
||||
# attribute.
|
||||
#
|
||||
# This lets us inherit the UncompressedMX implementation but lets
|
||||
# the caller use appropriate attribute names for the rdata type.
|
||||
#
|
||||
# We probably lose some performance vs. a cut-and-paste
|
||||
# implementation, but this way we don't copy code, and that's
|
||||
# good.
|
||||
|
||||
def get_subtype(self):
|
||||
return self.preference
|
||||
|
||||
def set_subtype(self, subtype):
|
||||
self.preference = subtype
|
||||
|
||||
subtype = property(get_subtype, set_subtype)
|
||||
|
||||
def get_hostname(self):
|
||||
return self.exchange
|
||||
|
||||
def set_hostname(self, hostname):
|
||||
self.exchange = hostname
|
||||
|
||||
hostname = property(get_hostname, set_hostname)
|
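A short illustration of the aliasing described in the comments above. This is a sketch, not part of the file, and assumes the usual dnspython entry point dns.rdata.from_text:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.AFSDB, '1 afs.example.')
assert rd.subtype == rd.preference == 1   # subtype is an alias for preference
assert rd.hostname is rd.exchange         # hostname is an alias for exchange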
74 lib/dns/rdtypes/ANY/CAA.py Normal file
@@ -0,0 +1,74 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.tokenizer
|
||||
|
||||
|
||||
class CAA(dns.rdata.Rdata):
|
||||
|
||||
"""CAA (Certification Authority Authorization) record
|
||||
|
||||
@ivar flags: the flags
|
||||
@type flags: int
|
||||
@ivar tag: the tag
|
||||
@type tag: string
|
||||
@ivar value: the value
|
||||
@type value: string
|
||||
@see: RFC 6844"""
|
||||
|
||||
__slots__ = ['flags', 'tag', 'value']
|
||||
|
||||
def __init__(self, rdclass, rdtype, flags, tag, value):
|
||||
super(CAA, self).__init__(rdclass, rdtype)
|
||||
self.flags = flags
|
||||
self.tag = tag
|
||||
self.value = value
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
return '%u %s "%s"' % (self.flags,
|
||||
dns.rdata._escapify(self.tag),
|
||||
dns.rdata._escapify(self.value))
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
flags = tok.get_uint8()
|
||||
tag = tok.get_string().encode()
|
||||
if len(tag) > 255:
|
||||
raise dns.exception.SyntaxError("tag too long")
|
||||
if not tag.isalnum():
|
||||
raise dns.exception.SyntaxError("tag is not alphanumeric")
|
||||
value = tok.get_string().encode()
|
||||
return cls(rdclass, rdtype, flags, tag, value)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
file.write(struct.pack('!B', self.flags))
|
||||
l = len(self.tag)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.tag)
|
||||
file.write(self.value)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
(flags, l) = struct.unpack('!BB', wire[current: current + 2])
|
||||
current += 2
|
||||
tag = wire[current: current + l]
|
||||
value = wire[current + l:current + rdlen - 2]
|
||||
return cls(rdclass, rdtype, flags, tag, value)
|
||||
|
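For reference, a hand-packed sketch (illustrative only) of the wire image that to_wire() above produces: flags octet, tag-length octet, tag, then the value filling the rest of the rdata.

import struct

flags, tag, value = 0, b'issue', b'ca.example.net'
wire = struct.pack('!BB', flags, len(tag)) + tag + value
assert wire == b'\x00\x05issueca.example.net'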
25 lib/dns/rdtypes/ANY/CDNSKEY.py Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.dnskeybase
|
||||
from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set
|
||||
|
||||
|
||||
__all__ = ['flags_to_text_set', 'flags_from_text_set']
|
||||
|
||||
|
||||
class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase):
|
||||
|
||||
"""CDNSKEY record"""
|
21 lib/dns/rdtypes/ANY/CDS.py Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.dsbase
|
||||
|
||||
|
||||
class CDS(dns.rdtypes.dsbase.DSBase):
|
||||
|
||||
"""CDS record"""
|
122 lib/dns/rdtypes/ANY/CERT.py Normal file
@@ -0,0 +1,122 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
import base64
|
||||
|
||||
import dns.exception
|
||||
import dns.dnssec
|
||||
import dns.rdata
|
||||
import dns.tokenizer
|
||||
|
||||
_ctype_by_value = {
|
||||
1: 'PKIX',
|
||||
2: 'SPKI',
|
||||
3: 'PGP',
|
||||
253: 'URI',
|
||||
254: 'OID',
|
||||
}
|
||||
|
||||
_ctype_by_name = {
|
||||
'PKIX': 1,
|
||||
'SPKI': 2,
|
||||
'PGP': 3,
|
||||
'URI': 253,
|
||||
'OID': 254,
|
||||
}
|
||||
|
||||
|
||||
def _ctype_from_text(what):
|
||||
v = _ctype_by_name.get(what)
|
||||
if v is not None:
|
||||
return v
|
||||
return int(what)
|
||||
|
||||
|
||||
def _ctype_to_text(what):
|
||||
v = _ctype_by_value.get(what)
|
||||
if v is not None:
|
||||
return v
|
||||
return str(what)
|
||||
|
||||
|
||||
class CERT(dns.rdata.Rdata):
|
||||
|
||||
"""CERT record
|
||||
|
||||
@ivar certificate_type: certificate type
|
||||
@type certificate_type: int
|
||||
@ivar key_tag: key tag
|
||||
@type key_tag: int
|
||||
@ivar algorithm: algorithm
|
||||
@type algorithm: int
|
||||
@ivar certificate: the certificate or CRL
|
||||
@type certificate: string
|
||||
@see: RFC 2538"""
|
||||
|
||||
__slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate']
|
||||
|
||||
def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm,
|
||||
certificate):
|
||||
super(CERT, self).__init__(rdclass, rdtype)
|
||||
self.certificate_type = certificate_type
|
||||
self.key_tag = key_tag
|
||||
self.algorithm = algorithm
|
||||
self.certificate = certificate
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
certificate_type = _ctype_to_text(self.certificate_type)
|
||||
return "%s %d %s %s" % (certificate_type, self.key_tag,
|
||||
dns.dnssec.algorithm_to_text(self.algorithm),
|
||||
dns.rdata._base64ify(self.certificate))
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
certificate_type = _ctype_from_text(tok.get_string())
|
||||
key_tag = tok.get_uint16()
|
||||
algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
|
||||
if algorithm < 0 or algorithm > 255:
|
||||
raise dns.exception.SyntaxError("bad algorithm type")
|
||||
chunks = []
|
||||
while 1:
|
||||
t = tok.get().unescape()
|
||||
if t.is_eol_or_eof():
|
||||
break
|
||||
if not t.is_identifier():
|
||||
raise dns.exception.SyntaxError
|
||||
chunks.append(t.value.encode())
|
||||
b64 = b''.join(chunks)
|
||||
certificate = base64.b64decode(b64)
|
||||
return cls(rdclass, rdtype, certificate_type, key_tag,
|
||||
algorithm, certificate)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
prefix = struct.pack("!HHB", self.certificate_type, self.key_tag,
|
||||
self.algorithm)
|
||||
file.write(prefix)
|
||||
file.write(self.certificate)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
prefix = wire[current: current + 5].unwrap()
|
||||
current += 5
|
||||
rdlen -= 5
|
||||
if rdlen < 0:
|
||||
raise dns.exception.FormError
|
||||
(certificate_type, key_tag, algorithm) = struct.unpack("!HHB", prefix)
|
||||
certificate = wire[current: current + rdlen].unwrap()
|
||||
return cls(rdclass, rdtype, certificate_type, key_tag, algorithm,
|
||||
certificate)
|
||||
|
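The certificate-type helpers near the top of this file accept either a mnemonic or a bare number. A small sketch of that behaviour (illustrative only, exercising the module-private helpers):

assert _ctype_from_text('PKIX') == 1
assert _ctype_from_text('250') == 250     # unregistered types stay numeric
assert _ctype_to_text(3) == 'PGP'
assert _ctype_to_text(250) == '250'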
25 lib/dns/rdtypes/ANY/CNAME.py Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.nsbase
|
||||
|
||||
|
||||
class CNAME(dns.rdtypes.nsbase.NSBase):
|
||||
|
||||
"""CNAME record
|
||||
|
||||
Note: although CNAME is officially a singleton type, dnspython allows
|
||||
non-singleton CNAME rdatasets because such sets have been commonly
|
||||
used by BIND and other nameservers for load balancing."""
|
124 lib/dns/rdtypes/ANY/CSYNC.py Normal file
@@ -0,0 +1,124 @@
|
||||
# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.rdatatype
|
||||
import dns.name
|
||||
from dns._compat import xrange
|
||||
|
||||
class CSYNC(dns.rdata.Rdata):
|
||||
|
||||
"""CSYNC record
|
||||
|
||||
@ivar serial: the SOA serial number
|
||||
@type serial: int
|
||||
@ivar flags: the CSYNC flags
|
||||
@type flags: int
|
||||
@ivar windows: the windowed bitmap list
|
||||
@type windows: list of (window number, string) tuples"""
|
||||
|
||||
__slots__ = ['serial', 'flags', 'windows']
|
||||
|
||||
def __init__(self, rdclass, rdtype, serial, flags, windows):
|
||||
super(CSYNC, self).__init__(rdclass, rdtype)
|
||||
self.serial = serial
|
||||
self.flags = flags
|
||||
self.windows = windows
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
text = ''
|
||||
for (window, bitmap) in self.windows:
|
||||
bits = []
|
||||
for i in xrange(0, len(bitmap)):
|
||||
byte = bitmap[i]
|
||||
for j in xrange(0, 8):
|
||||
if byte & (0x80 >> j):
|
||||
bits.append(dns.rdatatype.to_text(window * 256 +
|
||||
i * 8 + j))
|
||||
text += (' ' + ' '.join(bits))
|
||||
return '%d %d%s' % (self.serial, self.flags, text)
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
serial = tok.get_uint32()
|
||||
flags = tok.get_uint16()
|
||||
rdtypes = []
|
||||
while 1:
|
||||
token = tok.get().unescape()
|
||||
if token.is_eol_or_eof():
|
||||
break
|
||||
nrdtype = dns.rdatatype.from_text(token.value)
|
||||
if nrdtype == 0:
|
||||
raise dns.exception.SyntaxError("CSYNC with bit 0")
|
||||
if nrdtype > 65535:
|
||||
raise dns.exception.SyntaxError("CSYNC with bit > 65535")
|
||||
rdtypes.append(nrdtype)
|
||||
rdtypes.sort()
|
||||
window = 0
|
||||
octets = 0
|
||||
prior_rdtype = 0
|
||||
bitmap = bytearray(b'\0' * 32)
|
||||
windows = []
|
||||
for nrdtype in rdtypes:
|
||||
if nrdtype == prior_rdtype:
|
||||
continue
|
||||
prior_rdtype = nrdtype
|
||||
new_window = nrdtype // 256
|
||||
if new_window != window:
|
||||
windows.append((window, bitmap[0:octets]))
|
||||
bitmap = bytearray(b'\0' * 32)
|
||||
window = new_window
|
||||
offset = nrdtype % 256
|
||||
byte = offset // 8
|
||||
bit = offset % 8
|
||||
octets = byte + 1
|
||||
bitmap[byte] = bitmap[byte] | (0x80 >> bit)
|
||||
|
||||
windows.append((window, bitmap[0:octets]))
|
||||
return cls(rdclass, rdtype, serial, flags, windows)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
file.write(struct.pack('!IH', self.serial, self.flags))
|
||||
for (window, bitmap) in self.windows:
|
||||
file.write(struct.pack('!BB', window, len(bitmap)))
|
||||
file.write(bitmap)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
if rdlen < 6:
|
||||
raise dns.exception.FormError("CSYNC too short")
|
||||
(serial, flags) = struct.unpack("!IH", wire[current: current + 6])
|
||||
current += 6
|
||||
rdlen -= 6
|
||||
windows = []
|
||||
while rdlen > 0:
|
||||
if rdlen < 3:
|
||||
raise dns.exception.FormError("CSYNC too short")
|
||||
window = wire[current]
|
||||
octets = wire[current + 1]
|
||||
if octets == 0 or octets > 32:
|
||||
raise dns.exception.FormError("bad CSYNC octets")
|
||||
current += 2
|
||||
rdlen -= 2
|
||||
if rdlen < octets:
|
||||
raise dns.exception.FormError("bad CSYNC bitmap length")
|
||||
bitmap = bytearray(wire[current: current + octets].unwrap())
|
||||
current += octets
|
||||
rdlen -= octets
|
||||
windows.append((window, bitmap))
|
||||
return cls(rdclass, rdtype, serial, flags, windows)
|
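CSYNC (and NSEC, later in this changeset) encode their type lists with the windowed bitmap scheme from RFC 4034: type T goes into window T // 256, byte (T % 256) // 8, with the bit (T % 256) % 8 counted from the most significant bit. A tiny standalone sketch of that mapping (not part of the original file):

def type_bit(rdtype):
    # returns (window number, byte index within the window, bit mask)
    window, offset = divmod(rdtype, 256)
    return window, offset // 8, 0x80 >> (offset % 8)

assert type_bit(1) == (0, 0, 0x40)     # A
assert type_bit(2) == (0, 0, 0x20)     # NS
assert type_bit(257) == (1, 0, 0x40)   # CAA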
21 lib/dns/rdtypes/ANY/DLV.py Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.dsbase
|
||||
|
||||
|
||||
class DLV(dns.rdtypes.dsbase.DSBase):
|
||||
|
||||
"""DLV record"""
|
24 lib/dns/rdtypes/ANY/DNAME.py Normal file
@@ -0,0 +1,24 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.nsbase
|
||||
|
||||
|
||||
class DNAME(dns.rdtypes.nsbase.UncompressedNS):
|
||||
|
||||
"""DNAME record"""
|
||||
|
||||
def to_digestable(self, origin=None):
|
||||
return self.target.to_digestable(origin)
|
25 lib/dns/rdtypes/ANY/DNSKEY.py Normal file
@@ -0,0 +1,25 @@
|
||||
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.dnskeybase
|
||||
from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set
|
||||
|
||||
|
||||
__all__ = ['flags_to_text_set', 'flags_from_text_set']
|
||||
|
||||
|
||||
class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase):
|
||||
|
||||
"""DNSKEY record"""
|
21 lib/dns/rdtypes/ANY/DS.py Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.dsbase
|
||||
|
||||
|
||||
class DS(dns.rdtypes.dsbase.DSBase):
|
||||
|
||||
"""DS record"""
|
29 lib/dns/rdtypes/ANY/EUI48.py Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (C) 2015 Red Hat, Inc.
|
||||
# Author: Petr Spacek <pspacek@redhat.com>
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.euibase
|
||||
|
||||
|
||||
class EUI48(dns.rdtypes.euibase.EUIBase):
|
||||
|
||||
"""EUI48 record
|
||||
|
||||
@ivar fingerprint: 48-bit Extended Unique Identifier (EUI-48)
|
||||
@type fingerprint: string
|
||||
@see: rfc7043.txt"""
|
||||
|
||||
byte_len = 6 # 0123456789ab (in hex)
|
||||
text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab
|
29 lib/dns/rdtypes/ANY/EUI64.py Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (C) 2015 Red Hat, Inc.
|
||||
# Author: Petr Spacek <pspacek@redhat.com>
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.euibase
|
||||
|
||||
|
||||
class EUI64(dns.rdtypes.euibase.EUIBase):
|
||||
|
||||
"""EUI64 record
|
||||
|
||||
@ivar fingerprint: 64-bit Extended Unique Identifier (EUI-64)
|
||||
@type fingerprint: string
|
||||
@see: rfc7043.txt"""
|
||||
|
||||
byte_len = 8 # 0123456789abcdef (in hex)
|
||||
text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef
|
160 lib/dns/rdtypes/ANY/GPOS.py Normal file
@@ -0,0 +1,160 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.tokenizer
|
||||
from dns._compat import long, text_type
|
||||
|
||||
|
||||
def _validate_float_string(what):
|
||||
if what[0] == b'-'[0] or what[0] == b'+'[0]:
|
||||
what = what[1:]
|
||||
if what.isdigit():
|
||||
return
|
||||
(left, right) = what.split(b'.')
|
||||
if left == b'' and right == b'':
|
||||
raise dns.exception.FormError
|
||||
if not left == b'' and not left.decode().isdigit():
|
||||
raise dns.exception.FormError
|
||||
if not right == b'' and not right.decode().isdigit():
|
||||
raise dns.exception.FormError
|
||||
|
||||
|
||||
def _sanitize(value):
|
||||
if isinstance(value, text_type):
|
||||
return value.encode()
|
||||
return value
|
||||
|
||||
|
||||
class GPOS(dns.rdata.Rdata):
|
||||
|
||||
"""GPOS record
|
||||
|
||||
@ivar latitude: latitude
|
||||
@type latitude: string
|
||||
@ivar longitude: longitude
|
||||
@type longitude: string
|
||||
@ivar altitude: altitude
|
||||
@type altitude: string
|
||||
@see: RFC 1712"""
|
||||
|
||||
__slots__ = ['latitude', 'longitude', 'altitude']
|
||||
|
||||
def __init__(self, rdclass, rdtype, latitude, longitude, altitude):
|
||||
super(GPOS, self).__init__(rdclass, rdtype)
|
||||
if isinstance(latitude, float) or \
|
||||
isinstance(latitude, int) or \
|
||||
isinstance(latitude, long):
|
||||
latitude = str(latitude)
|
||||
if isinstance(longitude, float) or \
|
||||
isinstance(longitude, int) or \
|
||||
isinstance(longitude, long):
|
||||
longitude = str(longitude)
|
||||
if isinstance(altitude, float) or \
|
||||
isinstance(altitude, int) or \
|
||||
isinstance(altitude, long):
|
||||
altitude = str(altitude)
|
||||
latitude = _sanitize(latitude)
|
||||
longitude = _sanitize(longitude)
|
||||
altitude = _sanitize(altitude)
|
||||
_validate_float_string(latitude)
|
||||
_validate_float_string(longitude)
|
||||
_validate_float_string(altitude)
|
||||
self.latitude = latitude
|
||||
self.longitude = longitude
|
||||
self.altitude = altitude
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
return '%s %s %s' % (self.latitude.decode(),
|
||||
self.longitude.decode(),
|
||||
self.altitude.decode())
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
latitude = tok.get_string()
|
||||
longitude = tok.get_string()
|
||||
altitude = tok.get_string()
|
||||
tok.get_eol()
|
||||
return cls(rdclass, rdtype, latitude, longitude, altitude)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
l = len(self.latitude)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.latitude)
|
||||
l = len(self.longitude)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.longitude)
|
||||
l = len(self.altitude)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.altitude)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l > rdlen:
|
||||
raise dns.exception.FormError
|
||||
latitude = wire[current: current + l].unwrap()
|
||||
current += l
|
||||
rdlen -= l
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l > rdlen:
|
||||
raise dns.exception.FormError
|
||||
longitude = wire[current: current + l].unwrap()
|
||||
current += l
|
||||
rdlen -= l
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l != rdlen:
|
||||
raise dns.exception.FormError
|
||||
altitude = wire[current: current + l].unwrap()
|
||||
return cls(rdclass, rdtype, latitude, longitude, altitude)
|
||||
|
||||
def _get_float_latitude(self):
|
||||
return float(self.latitude)
|
||||
|
||||
def _set_float_latitude(self, value):
|
||||
self.latitude = str(value)
|
||||
|
||||
float_latitude = property(_get_float_latitude, _set_float_latitude,
|
||||
doc="latitude as a floating point value")
|
||||
|
||||
def _get_float_longitude(self):
|
||||
return float(self.longitude)
|
||||
|
||||
def _set_float_longitude(self, value):
|
||||
self.longitude = str(value)
|
||||
|
||||
float_longitude = property(_get_float_longitude, _set_float_longitude,
|
||||
doc="longitude as a floating point value")
|
||||
|
||||
def _get_float_altitude(self):
|
||||
return float(self.altitude)
|
||||
|
||||
def _set_float_altitude(self, value):
|
||||
self.altitude = str(value)
|
||||
|
||||
float_altitude = property(_get_float_altitude, _set_float_altitude,
|
||||
doc="altitude as a floating point value")
|
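A usage sketch for the float_* properties above (assumes Python 3 and the dnspython entry point dns.rdata.from_text; the raw latitude/longitude/altitude fields are stored as byte strings and converted on access):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.GPOS,
                         '-32.6882 116.8652 10.0')
assert rd.latitude == b'-32.6882'      # kept verbatim as bytes
assert rd.float_latitude == -32.6882   # converted by the property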
85 lib/dns/rdtypes/ANY/HINFO.py Normal file
@@ -0,0 +1,85 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.tokenizer
|
||||
from dns._compat import text_type
|
||||
|
||||
|
||||
class HINFO(dns.rdata.Rdata):
|
||||
|
||||
"""HINFO record
|
||||
|
||||
@ivar cpu: the CPU type
|
||||
@type cpu: string
|
||||
@ivar os: the OS type
|
||||
@type os: string
|
||||
@see: RFC 1035"""
|
||||
|
||||
__slots__ = ['cpu', 'os']
|
||||
|
||||
def __init__(self, rdclass, rdtype, cpu, os):
|
||||
super(HINFO, self).__init__(rdclass, rdtype)
|
||||
if isinstance(cpu, text_type):
|
||||
self.cpu = cpu.encode()
|
||||
else:
|
||||
self.cpu = cpu
|
||||
if isinstance(os, text_type):
|
||||
self.os = os.encode()
|
||||
else:
|
||||
self.os = os
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
return '"%s" "%s"' % (dns.rdata._escapify(self.cpu),
|
||||
dns.rdata._escapify(self.os))
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
cpu = tok.get_string()
|
||||
os = tok.get_string()
|
||||
tok.get_eol()
|
||||
return cls(rdclass, rdtype, cpu, os)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
l = len(self.cpu)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.cpu)
|
||||
l = len(self.os)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.os)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l > rdlen:
|
||||
raise dns.exception.FormError
|
||||
cpu = wire[current:current + l].unwrap()
|
||||
current += l
|
||||
rdlen -= l
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l != rdlen:
|
||||
raise dns.exception.FormError
|
||||
os = wire[current: current + l].unwrap()
|
||||
return cls(rdclass, rdtype, cpu, os)
|
||||
|
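A small round-trip sketch (again via the assumed dns.rdata.from_text entry point): the two character-strings are stored as bytes and re-quoted by to_text().

import dns.rdata
import dns.rdataclass
import dns.rdatatype

rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.HINFO, '"PDP-11" "UNIX"')
assert rd.cpu == b'PDP-11' and rd.os == b'UNIX'
assert rd.to_text() == '"PDP-11" "UNIX"'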
113 lib/dns/rdtypes/ANY/HIP.py Normal file
@@ -0,0 +1,113 @@
|
||||
# Copyright (C) 2010, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
import base64
|
||||
import binascii
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.rdatatype
|
||||
|
||||
|
||||
class HIP(dns.rdata.Rdata):
|
||||
|
||||
"""HIP record
|
||||
|
||||
@ivar hit: the host identity tag
|
||||
@type hit: string
|
||||
@ivar algorithm: the public key cryptographic algorithm
|
||||
@type algorithm: int
|
||||
@ivar key: the public key
|
||||
@type key: string
|
||||
@ivar servers: the rendezvous servers
|
||||
@type servers: list of dns.name.Name objects
|
||||
@see: RFC 5205"""
|
||||
|
||||
__slots__ = ['hit', 'algorithm', 'key', 'servers']
|
||||
|
||||
def __init__(self, rdclass, rdtype, hit, algorithm, key, servers):
|
||||
super(HIP, self).__init__(rdclass, rdtype)
|
||||
self.hit = hit
|
||||
self.algorithm = algorithm
|
||||
self.key = key
|
||||
self.servers = servers
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
hit = binascii.hexlify(self.hit).decode()
|
||||
key = base64.b64encode(self.key).replace(b'\n', b'').decode()
|
||||
text = u''
|
||||
servers = []
|
||||
for server in self.servers:
|
||||
servers.append(server.choose_relativity(origin, relativize))
|
||||
if len(servers) > 0:
|
||||
text += (u' ' + u' '.join(map(lambda x: x.to_unicode(), servers)))
|
||||
return u'%u %s %s%s' % (self.algorithm, hit, key, text)
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
algorithm = tok.get_uint8()
|
||||
hit = binascii.unhexlify(tok.get_string().encode())
|
||||
if len(hit) > 255:
|
||||
raise dns.exception.SyntaxError("HIT too long")
|
||||
key = base64.b64decode(tok.get_string().encode())
|
||||
servers = []
|
||||
while 1:
|
||||
token = tok.get()
|
||||
if token.is_eol_or_eof():
|
||||
break
|
||||
server = dns.name.from_text(token.value, origin)
|
||||
server.choose_relativity(origin, relativize)
|
||||
servers.append(server)
|
||||
return cls(rdclass, rdtype, hit, algorithm, key, servers)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
lh = len(self.hit)
|
||||
lk = len(self.key)
|
||||
file.write(struct.pack("!BBH", lh, self.algorithm, lk))
|
||||
file.write(self.hit)
|
||||
file.write(self.key)
|
||||
for server in self.servers:
|
||||
server.to_wire(file, None, origin)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
(lh, algorithm, lk) = struct.unpack('!BBH',
|
||||
wire[current: current + 4])
|
||||
current += 4
|
||||
rdlen -= 4
|
||||
hit = wire[current: current + lh].unwrap()
|
||||
current += lh
|
||||
rdlen -= lh
|
||||
key = wire[current: current + lk].unwrap()
|
||||
current += lk
|
||||
rdlen -= lk
|
||||
servers = []
|
||||
while rdlen > 0:
|
||||
(server, cused) = dns.name.from_wire(wire[: current + rdlen],
|
||||
current)
|
||||
current += cused
|
||||
rdlen -= cused
|
||||
if origin is not None:
|
||||
server = server.relativize(origin)
|
||||
servers.append(server)
|
||||
return cls(rdclass, rdtype, hit, algorithm, key, servers)
|
||||
|
||||
def choose_relativity(self, origin=None, relativize=True):
|
||||
servers = []
|
||||
for server in self.servers:
|
||||
server = server.choose_relativity(origin, relativize)
|
||||
servers.append(server)
|
||||
self.servers = servers
|
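For reference, a hand-packed sketch of the fixed-size header that HIP.to_wire() above writes before the HIT, the public key, and the rendezvous servers:

import struct

hit, key = bytes(16), bytes(33)                      # only the lengths matter here
header = struct.pack('!BBH', len(hit), 5, len(key))  # HIT length, algorithm, key length
assert header == b'\x10\x05\x00\x21'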
98 lib/dns/rdtypes/ANY/ISDN.py Normal file
@@ -0,0 +1,98 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
import dns.tokenizer
|
||||
from dns._compat import text_type
|
||||
|
||||
|
||||
class ISDN(dns.rdata.Rdata):
|
||||
|
||||
"""ISDN record
|
||||
|
||||
@ivar address: the ISDN address
|
||||
@type address: string
|
||||
@ivar subaddress: the ISDN subaddress (or '' if not present)
|
||||
@type subaddress: string
|
||||
@see: RFC 1183"""
|
||||
|
||||
__slots__ = ['address', 'subaddress']
|
||||
|
||||
    def __init__(self, rdclass, rdtype, address, subaddress):
        super(ISDN, self).__init__(rdclass, rdtype)
        if isinstance(address, text_type):
            self.address = address.encode()
        else:
            self.address = address
        # encode the subaddress if it was given as text
        if isinstance(subaddress, text_type):
            self.subaddress = subaddress.encode()
        else:
            self.subaddress = subaddress
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
if self.subaddress:
|
||||
return '"%s" "%s"' % (dns.rdata._escapify(self.address),
|
||||
dns.rdata._escapify(self.subaddress))
|
||||
else:
|
||||
return '"%s"' % dns.rdata._escapify(self.address)
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
address = tok.get_string()
|
||||
t = tok.get()
|
||||
if not t.is_eol_or_eof():
|
||||
tok.unget(t)
|
||||
subaddress = tok.get_string()
|
||||
else:
|
||||
tok.unget(t)
|
||||
subaddress = ''
|
||||
tok.get_eol()
|
||||
return cls(rdclass, rdtype, address, subaddress)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
l = len(self.address)
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.address)
|
||||
l = len(self.subaddress)
|
||||
if l > 0:
|
||||
assert l < 256
|
||||
file.write(struct.pack('!B', l))
|
||||
file.write(self.subaddress)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l > rdlen:
|
||||
raise dns.exception.FormError
|
||||
address = wire[current: current + l].unwrap()
|
||||
current += l
|
||||
rdlen -= l
|
||||
if rdlen > 0:
|
||||
l = wire[current]
|
||||
current += 1
|
||||
rdlen -= 1
|
||||
if l != rdlen:
|
||||
raise dns.exception.FormError
|
||||
subaddress = wire[current: current + l].unwrap()
|
||||
else:
|
||||
subaddress = ''
|
||||
return cls(rdclass, rdtype, address, subaddress)
|
||||
|
327 lib/dns/rdtypes/ANY/LOC.py Normal file
@@ -0,0 +1,327 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.rdata
|
||||
from dns._compat import long, xrange
|
||||
|
||||
|
||||
_pows = tuple(long(10**i) for i in range(0, 11))
|
||||
|
||||
# default values are in centimeters
|
||||
_default_size = 100.0
|
||||
_default_hprec = 1000000.0
|
||||
_default_vprec = 1000.0
|
||||
|
||||
|
||||
def _exponent_of(what, desc):
|
||||
if what == 0:
|
||||
return 0
|
||||
exp = None
|
||||
for i in xrange(len(_pows)):
|
||||
if what // _pows[i] == long(0):
|
||||
exp = i - 1
|
||||
break
|
||||
if exp is None or exp < 0:
|
||||
raise dns.exception.SyntaxError("%s value out of bounds" % desc)
|
||||
return exp
|
||||
|
||||
|
||||
def _float_to_tuple(what):
|
||||
if what < 0:
|
||||
sign = -1
|
||||
what *= -1
|
||||
else:
|
||||
sign = 1
|
||||
what = long(round(what * 3600000))
|
||||
degrees = int(what // 3600000)
|
||||
what -= degrees * 3600000
|
||||
minutes = int(what // 60000)
|
||||
what -= minutes * 60000
|
||||
seconds = int(what // 1000)
|
||||
what -= int(seconds * 1000)
|
||||
what = int(what)
|
||||
return (degrees, minutes, seconds, what, sign)
|
||||
|
||||
|
||||
def _tuple_to_float(what):
|
||||
value = float(what[0])
|
||||
value += float(what[1]) / 60.0
|
||||
value += float(what[2]) / 3600.0
|
||||
value += float(what[3]) / 3600000.0
|
||||
return float(what[4]) * value
|
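# Illustrative aside (not part of the original file): coordinates are stored as
# (degrees, minutes, seconds, milliseconds, sign) tuples, converted by the two
# helpers above.
assert _float_to_tuple(-30.0425) == (30, 2, 33, 0, -1)
assert abs(_tuple_to_float((30, 2, 33, 0, -1)) + 30.0425) < 1e-9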
||||
|
||||
|
||||
def _encode_size(what, desc):
|
||||
what = long(what)
|
||||
exponent = _exponent_of(what, desc) & 0xF
|
||||
base = what // pow(10, exponent) & 0xF
|
||||
return base * 16 + exponent
|
||||
|
||||
|
||||
def _decode_size(what, desc):
|
||||
exponent = what & 0x0F
|
||||
if exponent > 9:
|
||||
raise dns.exception.SyntaxError("bad %s exponent" % desc)
|
||||
base = (what & 0xF0) >> 4
|
||||
if base > 9:
|
||||
raise dns.exception.SyntaxError("bad %s base" % desc)
|
||||
return long(base) * pow(10, exponent)
|
||||
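# Illustrative aside (not part of the original file): the size/precision fields
# pack a centimeter value as (base << 4) | exponent, i.e. base * 10**exponent cm.
assert _encode_size(100.0, "size") == 0x12      # 1e2 cm = 1 m (the default size)
assert _decode_size(0x12, "size") == 100
assert _decode_size(0x35, "size") == 300000     # 3e5 cm = 3 km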
|
||||
|
||||
class LOC(dns.rdata.Rdata):
|
||||
|
||||
"""LOC record
|
||||
|
||||
@ivar latitude: latitude
|
||||
@type latitude: (int, int, int, int, sign) tuple specifying the degrees, minutes,
|
||||
seconds, milliseconds, and sign of the coordinate.
|
||||
@ivar longitude: longitude
|
||||
@type longitude: (int, int, int, int, sign) tuple specifying the degrees,
|
||||
minutes, seconds, milliseconds, and sign of the coordinate.
|
||||
@ivar altitude: altitude
|
||||
@type altitude: float
|
||||
@ivar size: size of the sphere
|
||||
@type size: float
|
||||
@ivar horizontal_precision: horizontal precision
|
||||
@type horizontal_precision: float
|
||||
@ivar vertical_precision: vertical precision
|
||||
@type vertical_precision: float
|
||||
@see: RFC 1876"""
|
||||
|
||||
__slots__ = ['latitude', 'longitude', 'altitude', 'size',
|
||||
'horizontal_precision', 'vertical_precision']
|
||||
|
||||
def __init__(self, rdclass, rdtype, latitude, longitude, altitude,
|
||||
size=_default_size, hprec=_default_hprec,
|
||||
vprec=_default_vprec):
|
||||
"""Initialize a LOC record instance.
|
||||
|
||||
The parameters I{latitude} and I{longitude} may be either a 4-tuple
|
||||
of integers specifying (degrees, minutes, seconds, milliseconds),
|
||||
or they may be floating point values specifying the number of
|
||||
degrees. The other parameters are floats. Size, horizontal precision,
|
||||
and vertical precision are specified in centimeters."""
|
||||
|
||||
super(LOC, self).__init__(rdclass, rdtype)
|
||||
if isinstance(latitude, int) or isinstance(latitude, long):
|
||||
latitude = float(latitude)
|
||||
if isinstance(latitude, float):
|
||||
latitude = _float_to_tuple(latitude)
|
||||
self.latitude = latitude
|
||||
if isinstance(longitude, int) or isinstance(longitude, long):
|
||||
longitude = float(longitude)
|
||||
if isinstance(longitude, float):
|
||||
longitude = _float_to_tuple(longitude)
|
||||
self.longitude = longitude
|
||||
self.altitude = float(altitude)
|
||||
self.size = float(size)
|
||||
self.horizontal_precision = float(hprec)
|
||||
self.vertical_precision = float(vprec)
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
if self.latitude[4] > 0:
|
||||
lat_hemisphere = 'N'
|
||||
lat_degrees = self.latitude[0]
|
||||
else:
|
||||
lat_hemisphere = 'S'
|
||||
lat_degrees = -1 * self.latitude[0]
|
||||
if self.longitude[4] > 0:
|
||||
long_hemisphere = 'E'
|
||||
long_degrees = self.longitude[0]
|
||||
else:
|
||||
long_hemisphere = 'W'
|
||||
long_degrees = -1 * self.longitude[0]
|
||||
text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % (
|
||||
self.latitude[0], self.latitude[1],
|
||||
self.latitude[2], self.latitude[3], lat_hemisphere,
|
||||
self.longitude[0], self.longitude[1], self.longitude[2],
|
||||
self.longitude[3], long_hemisphere,
|
||||
self.altitude / 100.0
|
||||
)
|
||||
|
||||
# do not print default values
|
||||
if self.size != _default_size or \
|
||||
self.horizontal_precision != _default_hprec or \
|
||||
self.vertical_precision != _default_vprec:
|
||||
text += " %0.2fm %0.2fm %0.2fm" % (
|
||||
self.size / 100.0, self.horizontal_precision / 100.0,
|
||||
self.vertical_precision / 100.0
|
||||
)
|
||||
return text
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
latitude = [0, 0, 0, 0, 1]
|
||||
longitude = [0, 0, 0, 0, 1]
|
||||
size = _default_size
|
||||
hprec = _default_hprec
|
||||
vprec = _default_vprec
|
||||
|
||||
latitude[0] = tok.get_int()
|
||||
t = tok.get_string()
|
||||
if t.isdigit():
|
||||
latitude[1] = int(t)
|
||||
t = tok.get_string()
|
||||
if '.' in t:
|
||||
(seconds, milliseconds) = t.split('.')
|
||||
if not seconds.isdigit():
|
||||
raise dns.exception.SyntaxError(
|
||||
'bad latitude seconds value')
|
||||
latitude[2] = int(seconds)
|
||||
if latitude[2] >= 60:
|
||||
raise dns.exception.SyntaxError('latitude seconds >= 60')
|
||||
l = len(milliseconds)
|
||||
if l == 0 or l > 3 or not milliseconds.isdigit():
|
||||
raise dns.exception.SyntaxError(
|
||||
'bad latitude milliseconds value')
|
||||
if l == 1:
|
||||
m = 100
|
||||
elif l == 2:
|
||||
m = 10
|
||||
else:
|
||||
m = 1
|
||||
latitude[3] = m * int(milliseconds)
|
||||
t = tok.get_string()
|
||||
elif t.isdigit():
|
||||
latitude[2] = int(t)
|
||||
t = tok.get_string()
|
||||
if t == 'S':
|
||||
latitude[4] = -1
|
||||
elif t != 'N':
|
||||
raise dns.exception.SyntaxError('bad latitude hemisphere value')
|
||||
|
||||
longitude[0] = tok.get_int()
|
||||
t = tok.get_string()
|
||||
if t.isdigit():
|
||||
longitude[1] = int(t)
|
||||
t = tok.get_string()
|
||||
if '.' in t:
|
||||
(seconds, milliseconds) = t.split('.')
|
||||
if not seconds.isdigit():
|
||||
raise dns.exception.SyntaxError(
|
||||
'bad longitude seconds value')
|
||||
longitude[2] = int(seconds)
|
||||
if longitude[2] >= 60:
|
||||
raise dns.exception.SyntaxError('longitude seconds >= 60')
|
||||
l = len(milliseconds)
|
||||
if l == 0 or l > 3 or not milliseconds.isdigit():
|
||||
raise dns.exception.SyntaxError(
|
||||
'bad longitude milliseconds value')
|
||||
if l == 1:
|
||||
m = 100
|
||||
elif l == 2:
|
||||
m = 10
|
||||
else:
|
||||
m = 1
|
||||
longitude[3] = m * int(milliseconds)
|
||||
t = tok.get_string()
|
||||
elif t.isdigit():
|
||||
longitude[2] = int(t)
|
||||
t = tok.get_string()
|
||||
if t == 'W':
|
||||
longitude[4] = -1
|
||||
elif t != 'E':
|
||||
raise dns.exception.SyntaxError('bad longitude hemisphere value')
|
||||
|
||||
t = tok.get_string()
|
||||
if t[-1] == 'm':
|
||||
t = t[0: -1]
|
||||
altitude = float(t) * 100.0 # m -> cm
|
||||
|
||||
token = tok.get().unescape()
|
||||
if not token.is_eol_or_eof():
|
||||
value = token.value
|
||||
if value[-1] == 'm':
|
||||
value = value[0: -1]
|
||||
size = float(value) * 100.0 # m -> cm
|
||||
token = tok.get().unescape()
|
||||
if not token.is_eol_or_eof():
|
||||
value = token.value
|
||||
if value[-1] == 'm':
|
||||
value = value[0: -1]
|
||||
hprec = float(value) * 100.0 # m -> cm
|
||||
token = tok.get().unescape()
|
||||
if not token.is_eol_or_eof():
|
||||
value = token.value
|
||||
if value[-1] == 'm':
|
||||
value = value[0: -1]
|
||||
vprec = float(value) * 100.0 # m -> cm
|
||||
tok.get_eol()
|
||||
|
||||
return cls(rdclass, rdtype, latitude, longitude, altitude,
|
||||
size, hprec, vprec)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
milliseconds = (self.latitude[0] * 3600000 +
|
||||
self.latitude[1] * 60000 +
|
||||
self.latitude[2] * 1000 +
|
||||
self.latitude[3]) * self.latitude[4]
|
||||
latitude = long(0x80000000) + milliseconds
|
||||
milliseconds = (self.longitude[0] * 3600000 +
|
||||
self.longitude[1] * 60000 +
|
||||
self.longitude[2] * 1000 +
|
||||
self.longitude[3]) * self.longitude[4]
|
||||
longitude = long(0x80000000) + milliseconds
|
||||
altitude = long(self.altitude) + long(10000000)
|
||||
size = _encode_size(self.size, "size")
|
||||
hprec = _encode_size(self.horizontal_precision, "horizontal precision")
|
||||
vprec = _encode_size(self.vertical_precision, "vertical precision")
|
||||
wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude,
|
||||
longitude, altitude)
|
||||
file.write(wire)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
(version, size, hprec, vprec, latitude, longitude, altitude) = \
|
||||
struct.unpack("!BBBBIII", wire[current: current + rdlen])
|
||||
if latitude > long(0x80000000):
|
||||
latitude = float(latitude - long(0x80000000)) / 3600000
|
||||
else:
|
||||
latitude = -1 * float(long(0x80000000) - latitude) / 3600000
|
||||
if latitude < -90.0 or latitude > 90.0:
|
||||
raise dns.exception.FormError("bad latitude")
|
||||
if longitude > long(0x80000000):
|
||||
longitude = float(longitude - long(0x80000000)) / 3600000
|
||||
else:
|
||||
longitude = -1 * float(long(0x80000000) - longitude) / 3600000
|
||||
if longitude < -180.0 or longitude > 180.0:
|
||||
raise dns.exception.FormError("bad longitude")
|
||||
altitude = float(altitude) - 10000000.0
|
||||
size = _decode_size(size, "size")
|
||||
hprec = _decode_size(hprec, "horizontal precision")
|
||||
vprec = _decode_size(vprec, "vertical precision")
|
||||
return cls(rdclass, rdtype, latitude, longitude, altitude,
|
||||
size, hprec, vprec)
|
||||
|
||||
def _get_float_latitude(self):
|
||||
return _tuple_to_float(self.latitude)
|
||||
|
||||
def _set_float_latitude(self, value):
|
||||
self.latitude = _float_to_tuple(value)
|
||||
|
||||
float_latitude = property(_get_float_latitude, _set_float_latitude,
|
||||
doc="latitude as a floating point value")
|
||||
|
||||
def _get_float_longitude(self):
|
||||
return _tuple_to_float(self.longitude)
|
||||
|
||||
def _set_float_longitude(self, value):
|
||||
self.longitude = _float_to_tuple(value)
|
||||
|
||||
float_longitude = property(_get_float_longitude, _set_float_longitude,
|
||||
doc="longitude as a floating point value")
|
21 lib/dns/rdtypes/ANY/MX.py Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.mxbase
|
||||
|
||||
|
||||
class MX(dns.rdtypes.mxbase.MXBase):
|
||||
|
||||
"""MX record"""
|
21 lib/dns/rdtypes/ANY/NS.py Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import dns.rdtypes.nsbase
|
||||
|
||||
|
||||
class NS(dns.rdtypes.nsbase.NSBase):
|
||||
|
||||
"""NS record"""
|
126 lib/dns/rdtypes/ANY/NSEC.py Normal file
@@ -0,0 +1,126 @@
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import struct

import dns.exception
import dns.rdata
import dns.rdatatype
import dns.name
from dns._compat import xrange


class NSEC(dns.rdata.Rdata):

    """NSEC record

    @ivar next: the next name
    @type next: dns.name.Name object
    @ivar windows: the windowed bitmap list
    @type windows: list of (window number, string) tuples"""

    __slots__ = ['next', 'windows']

    def __init__(self, rdclass, rdtype, next, windows):
        super(NSEC, self).__init__(rdclass, rdtype)
        self.next = next
        self.windows = windows

    def to_text(self, origin=None, relativize=True, **kw):
        next = self.next.choose_relativity(origin, relativize)
        text = ''
        for (window, bitmap) in self.windows:
            bits = []
            for i in xrange(0, len(bitmap)):
                byte = bitmap[i]
                for j in xrange(0, 8):
                    if byte & (0x80 >> j):
                        bits.append(dns.rdatatype.to_text(window * 256 +
                                                          i * 8 + j))
            text += (' ' + ' '.join(bits))
        return '%s%s' % (next, text)

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        next = tok.get_name()
        next = next.choose_relativity(origin, relativize)
        rdtypes = []
        while 1:
            token = tok.get().unescape()
            if token.is_eol_or_eof():
                break
            nrdtype = dns.rdatatype.from_text(token.value)
            if nrdtype == 0:
                raise dns.exception.SyntaxError("NSEC with bit 0")
            if nrdtype > 65535:
                raise dns.exception.SyntaxError("NSEC with bit > 65535")
            rdtypes.append(nrdtype)
        rdtypes.sort()
        window = 0
        octets = 0
        prior_rdtype = 0
        bitmap = bytearray(b'\0' * 32)
        windows = []
        for nrdtype in rdtypes:
            if nrdtype == prior_rdtype:
                continue
            prior_rdtype = nrdtype
            new_window = nrdtype // 256
            if new_window != window:
                windows.append((window, bitmap[0:octets]))
                bitmap = bytearray(b'\0' * 32)
                window = new_window
            offset = nrdtype % 256
            byte = offset // 8
            bit = offset % 8
            octets = byte + 1
            bitmap[byte] = bitmap[byte] | (0x80 >> bit)

        windows.append((window, bitmap[0:octets]))
        return cls(rdclass, rdtype, next, windows)

    def to_wire(self, file, compress=None, origin=None):
        self.next.to_wire(file, None, origin)
        for (window, bitmap) in self.windows:
            file.write(struct.pack('!BB', window, len(bitmap)))
            file.write(bitmap)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        (next, cused) = dns.name.from_wire(wire[: current + rdlen], current)
        current += cused
        rdlen -= cused
        windows = []
        while rdlen > 0:
            if rdlen < 3:
                raise dns.exception.FormError("NSEC too short")
            window = wire[current]
            octets = wire[current + 1]
            if octets == 0 or octets > 32:
                raise dns.exception.FormError("bad NSEC octets")
            current += 2
            rdlen -= 2
            if rdlen < octets:
                raise dns.exception.FormError("bad NSEC bitmap length")
            bitmap = bytearray(wire[current: current + octets].unwrap())
            current += octets
            rdlen -= octets
            windows.append((window, bitmap))
        if origin is not None:
            next = next.relativize(origin)
        return cls(rdclass, rdtype, next, windows)

    def choose_relativity(self, origin=None, relativize=True):
        self.next = self.next.choose_relativity(origin, relativize)
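
Illustrative sketch, not part of the diff: from_text/to_text above implement the RFC 4034 type-bitmap encoding, where each RR type lands in window type // 256, byte (type % 256) // 8, bit (type % 256) % 8, high bit first. The same packing can be shown standalone, without the tokenizer machinery (type codes below are illustrative).

# Standalone illustration of the NSEC window/bitmap packing used above.
def pack_type_bitmaps(type_codes):
    windows = {}
    for t in sorted(set(type_codes)):
        window, offset = divmod(t, 256)
        byte, bit = divmod(offset, 8)
        bitmap = windows.setdefault(window, bytearray(32))
        bitmap[byte] |= 0x80 >> bit
    # Trim each bitmap to the last non-zero octet, as to_wire expects.
    return [(w, bytes(b[:max(i + 1 for i, v in enumerate(b) if v)]))
            for w, b in sorted(windows.items())]

# A (1), NS (2), SOA (6), RRSIG (46), NSEC (47) all fall in window 0.
print(pack_type_bitmaps([1, 2, 6, 46, 47]))
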
lib/dns/rdtypes/ANY/NSEC3.py (Normal file, 192 lines)
@@ -0,0 +1,192 @@
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import base64
import binascii
import string
import struct

import dns.exception
import dns.rdata
import dns.rdatatype
from dns._compat import xrange, text_type

try:
    b32_hex_to_normal = string.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUV',
                                         'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567')
    b32_normal_to_hex = string.maketrans('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
                                         '0123456789ABCDEFGHIJKLMNOPQRSTUV')
except AttributeError:
    b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV',
                                        b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567')
    b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
                                        b'0123456789ABCDEFGHIJKLMNOPQRSTUV')

# hash algorithm constants
SHA1 = 1

# flag constants
OPTOUT = 1


class NSEC3(dns.rdata.Rdata):

    """NSEC3 record

    @ivar algorithm: the hash algorithm number
    @type algorithm: int
    @ivar flags: the flags
    @type flags: int
    @ivar iterations: the number of iterations
    @type iterations: int
    @ivar salt: the salt
    @type salt: string
    @ivar next: the next name hash
    @type next: string
    @ivar windows: the windowed bitmap list
    @type windows: list of (window number, string) tuples"""

    __slots__ = ['algorithm', 'flags', 'iterations', 'salt', 'next', 'windows']

    def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt,
                 next, windows):
        super(NSEC3, self).__init__(rdclass, rdtype)
        self.algorithm = algorithm
        self.flags = flags
        self.iterations = iterations
        if isinstance(salt, text_type):
            self.salt = salt.encode()
        else:
            self.salt = salt
        self.next = next
        self.windows = windows

    def to_text(self, origin=None, relativize=True, **kw):
        next = base64.b32encode(self.next).translate(
            b32_normal_to_hex).lower().decode()
        if self.salt == b'':
            salt = '-'
        else:
            salt = binascii.hexlify(self.salt).decode()
        text = u''
        for (window, bitmap) in self.windows:
            bits = []
            for i in xrange(0, len(bitmap)):
                byte = bitmap[i]
                for j in xrange(0, 8):
                    if byte & (0x80 >> j):
                        bits.append(dns.rdatatype.to_text(window * 256 +
                                                          i * 8 + j))
            text += (u' ' + u' '.join(bits))
        return u'%u %u %u %s %s%s' % (self.algorithm, self.flags,
                                      self.iterations, salt, next, text)

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        algorithm = tok.get_uint8()
        flags = tok.get_uint8()
        iterations = tok.get_uint16()
        salt = tok.get_string()
        if salt == u'-':
            salt = b''
        else:
            salt = binascii.unhexlify(salt.encode('ascii'))
        next = tok.get_string().encode(
            'ascii').upper().translate(b32_hex_to_normal)
        next = base64.b32decode(next)
        rdtypes = []
        while 1:
            token = tok.get().unescape()
            if token.is_eol_or_eof():
                break
            nrdtype = dns.rdatatype.from_text(token.value)
            if nrdtype == 0:
                raise dns.exception.SyntaxError("NSEC3 with bit 0")
            if nrdtype > 65535:
                raise dns.exception.SyntaxError("NSEC3 with bit > 65535")
            rdtypes.append(nrdtype)
        rdtypes.sort()
        window = 0
        octets = 0
        prior_rdtype = 0
        bitmap = bytearray(b'\0' * 32)
        windows = []
        for nrdtype in rdtypes:
            if nrdtype == prior_rdtype:
                continue
            prior_rdtype = nrdtype
            new_window = nrdtype // 256
            if new_window != window:
                if octets != 0:
                    windows.append((window, bitmap[0:octets]))
                bitmap = bytearray(b'\0' * 32)
                window = new_window
            offset = nrdtype % 256
            byte = offset // 8
            bit = offset % 8
            octets = byte + 1
            bitmap[byte] = bitmap[byte] | (0x80 >> bit)
        if octets != 0:
            windows.append((window, bitmap[0:octets]))
        return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next,
                   windows)

    def to_wire(self, file, compress=None, origin=None):
        l = len(self.salt)
        file.write(struct.pack("!BBHB", self.algorithm, self.flags,
                               self.iterations, l))
        file.write(self.salt)
        l = len(self.next)
        file.write(struct.pack("!B", l))
        file.write(self.next)
        for (window, bitmap) in self.windows:
            file.write(struct.pack("!BB", window, len(bitmap)))
            file.write(bitmap)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        (algorithm, flags, iterations, slen) = \
            struct.unpack('!BBHB', wire[current: current + 5])

        current += 5
        rdlen -= 5
        salt = wire[current: current + slen].unwrap()
        current += slen
        rdlen -= slen
        nlen = wire[current]
        current += 1
        rdlen -= 1
        next = wire[current: current + nlen].unwrap()
        current += nlen
        rdlen -= nlen
        windows = []
        while rdlen > 0:
            if rdlen < 3:
                raise dns.exception.FormError("NSEC3 too short")
            window = wire[current]
            octets = wire[current + 1]
            if octets == 0 or octets > 32:
                raise dns.exception.FormError("bad NSEC3 octets")
            current += 2
            rdlen -= 2
            if rdlen < octets:
                raise dns.exception.FormError("bad NSEC3 bitmap length")
            bitmap = bytearray(wire[current: current + octets].unwrap())
            current += octets
            rdlen -= octets
            windows.append((window, bitmap))
        return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next,
                   windows)
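
Illustrative sketch, not part of the diff: to_text/from_text above avoid a dedicated base32hex codec by translating between the standard RFC 4648 base32 alphabet and the "extended hex" alphabet around base64.b32encode/b32decode. The same round trip can be shown standalone (the digest value below is a stand-in, not a real hashed owner name).

# Standalone illustration of the base32 <-> base32hex translation trick.
import base64

b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
                                    b'0123456789ABCDEFGHIJKLMNOPQRSTUV')
b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV',
                                    b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567')

digest = b'\x01\x02\x03\x04\x05'   # stand-in for a hashed owner name
text = base64.b32encode(digest).translate(b32_normal_to_hex).lower().decode()
print(text)                        # base32hex form, as written in zone files

raw = base64.b32decode(text.encode('ascii').upper().translate(b32_hex_to_normal))
assert raw == digest
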
lib/dns/rdtypes/ANY/NSEC3PARAM.py (Normal file, 89 lines)
@@ -0,0 +1,89 @@
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import struct
import binascii

import dns.exception
import dns.rdata
from dns._compat import text_type


class NSEC3PARAM(dns.rdata.Rdata):

    """NSEC3PARAM record

    @ivar algorithm: the hash algorithm number
    @type algorithm: int
    @ivar flags: the flags
    @type flags: int
    @ivar iterations: the number of iterations
    @type iterations: int
    @ivar salt: the salt
    @type salt: string"""

    __slots__ = ['algorithm', 'flags', 'iterations', 'salt']

    def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt):
        super(NSEC3PARAM, self).__init__(rdclass, rdtype)
        self.algorithm = algorithm
        self.flags = flags
        self.iterations = iterations
        if isinstance(salt, text_type):
            self.salt = salt.encode()
        else:
            self.salt = salt

    def to_text(self, origin=None, relativize=True, **kw):
        if self.salt == b'':
            salt = '-'
        else:
            salt = binascii.hexlify(self.salt).decode()
        return '%u %u %u %s' % (self.algorithm, self.flags, self.iterations,
                                salt)

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        algorithm = tok.get_uint8()
        flags = tok.get_uint8()
        iterations = tok.get_uint16()
        salt = tok.get_string()
        if salt == '-':
            salt = ''
        else:
            salt = binascii.unhexlify(salt.encode())
        tok.get_eol()
        return cls(rdclass, rdtype, algorithm, flags, iterations, salt)

    def to_wire(self, file, compress=None, origin=None):
        l = len(self.salt)
        file.write(struct.pack("!BBHB", self.algorithm, self.flags,
                               self.iterations, l))
        file.write(self.salt)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        (algorithm, flags, iterations, slen) = \
            struct.unpack('!BBHB',
                          wire[current: current + 5])
        current += 5
        rdlen -= 5
        salt = wire[current: current + slen].unwrap()
        current += slen
        rdlen -= slen
        if rdlen != 0:
            raise dns.exception.FormError
        return cls(rdclass, rdtype, algorithm, flags, iterations, salt)
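
Illustrative sketch, not part of the diff: both NSEC3 and NSEC3PARAM use the same salt convention in presentation format, where '-' means an empty salt and anything else is hex. The helpers below restate that convention standalone.

# Standalone illustration of the salt handling used in to_text/from_text above.
import binascii

def salt_to_text(salt):
    return '-' if salt == b'' else binascii.hexlify(salt).decode()

def salt_from_text(text):
    return b'' if text == '-' else binascii.unhexlify(text.encode('ascii'))

print(salt_to_text(b''))                  # '-'
print(salt_to_text(b'\xaa\xbb\xcc\xdd'))  # 'aabbccdd'
print(salt_from_text('aabbccdd'))         # b'\xaa\xbb\xcc\xdd'
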
lib/dns/rdtypes/ANY/PTR.py (Normal file, 21 lines)
@@ -0,0 +1,21 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.rdtypes.nsbase


class PTR(dns.rdtypes.nsbase.NSBase):

    """PTR record"""
lib/dns/rdtypes/ANY/RP.py (Normal file, 80 lines)
@@ -0,0 +1,80 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.exception
import dns.rdata
import dns.name


class RP(dns.rdata.Rdata):

    """RP record

    @ivar mbox: The responsible person's mailbox
    @type mbox: dns.name.Name object
    @ivar txt: The owner name of a node with TXT records, or the root name
    if no TXT records are associated with this RP.
    @type txt: dns.name.Name object
    @see: RFC 1183"""

    __slots__ = ['mbox', 'txt']

    def __init__(self, rdclass, rdtype, mbox, txt):
        super(RP, self).__init__(rdclass, rdtype)
        self.mbox = mbox
        self.txt = txt

    def to_text(self, origin=None, relativize=True, **kw):
        mbox = self.mbox.choose_relativity(origin, relativize)
        txt = self.txt.choose_relativity(origin, relativize)
        return "%s %s" % (str(mbox), str(txt))

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        mbox = tok.get_name()
        txt = tok.get_name()
        mbox = mbox.choose_relativity(origin, relativize)
        txt = txt.choose_relativity(origin, relativize)
        tok.get_eol()
        return cls(rdclass, rdtype, mbox, txt)

    def to_wire(self, file, compress=None, origin=None):
        self.mbox.to_wire(file, None, origin)
        self.txt.to_wire(file, None, origin)

    def to_digestable(self, origin=None):
        return self.mbox.to_digestable(origin) + \
            self.txt.to_digestable(origin)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        (mbox, cused) = dns.name.from_wire(wire[: current + rdlen],
                                           current)
        current += cused
        rdlen -= cused
        if rdlen <= 0:
            raise dns.exception.FormError
        (txt, cused) = dns.name.from_wire(wire[: current + rdlen],
                                          current)
        if cused != rdlen:
            raise dns.exception.FormError
        if origin is not None:
            mbox = mbox.relativize(origin)
            txt = txt.relativize(origin)
        return cls(rdclass, rdtype, mbox, txt)

    def choose_relativity(self, origin=None, relativize=True):
        self.mbox = self.mbox.choose_relativity(origin, relativize)
        self.txt = self.txt.choose_relativity(origin, relativize)
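
Illustrative sketch, not part of the diff: RP carries two domain names, the responsible person's mailbox (with the '@' of the mail address replaced by a label separator) and the owner of an optional TXT record. Assumes the vendored package imports as `dns`; names below are made up.

# Illustrative only: build an RP rdata from presentation format.
import dns.rdata
import dns.rdataclass
import dns.rdatatype

rp = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.RP,
                         'hostmaster.example.com. contact-info.example.com.')
print(rp.mbox)   # hostmaster.example.com.  (i.e. hostmaster@example.com)
print(rp.txt)    # contact-info.example.com.
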
lib/dns/rdtypes/ANY/RRSIG.py (Normal file, 156 lines)
@@ -0,0 +1,156 @@
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import base64
import calendar
import struct
import time

import dns.dnssec
import dns.exception
import dns.rdata
import dns.rdatatype


class BadSigTime(dns.exception.DNSException):

    """Time in DNS SIG or RRSIG resource record cannot be parsed."""


def sigtime_to_posixtime(what):
    if len(what) != 14:
        raise BadSigTime
    year = int(what[0:4])
    month = int(what[4:6])
    day = int(what[6:8])
    hour = int(what[8:10])
    minute = int(what[10:12])
    second = int(what[12:14])
    return calendar.timegm((year, month, day, hour, minute, second,
                            0, 0, 0))


def posixtime_to_sigtime(what):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(what))


class RRSIG(dns.rdata.Rdata):

    """RRSIG record

    @ivar type_covered: the rdata type this signature covers
    @type type_covered: int
    @ivar algorithm: the algorithm used for the sig
    @type algorithm: int
    @ivar labels: number of labels
    @type labels: int
    @ivar original_ttl: the original TTL
    @type original_ttl: long
    @ivar expiration: signature expiration time
    @type expiration: long
    @ivar inception: signature inception time
    @type inception: long
    @ivar key_tag: the key tag
    @type key_tag: int
    @ivar signer: the signer
    @type signer: dns.name.Name object
    @ivar signature: the signature
    @type signature: string"""

    __slots__ = ['type_covered', 'algorithm', 'labels', 'original_ttl',
                 'expiration', 'inception', 'key_tag', 'signer',
                 'signature']

    def __init__(self, rdclass, rdtype, type_covered, algorithm, labels,
                 original_ttl, expiration, inception, key_tag, signer,
                 signature):
        super(RRSIG, self).__init__(rdclass, rdtype)
        self.type_covered = type_covered
        self.algorithm = algorithm
        self.labels = labels
        self.original_ttl = original_ttl
        self.expiration = expiration
        self.inception = inception
        self.key_tag = key_tag
        self.signer = signer
        self.signature = signature

    def covers(self):
        return self.type_covered

    def to_text(self, origin=None, relativize=True, **kw):
        return '%s %d %d %d %s %s %d %s %s' % (
            dns.rdatatype.to_text(self.type_covered),
            self.algorithm,
            self.labels,
            self.original_ttl,
            posixtime_to_sigtime(self.expiration),
            posixtime_to_sigtime(self.inception),
            self.key_tag,
            self.signer.choose_relativity(origin, relativize),
            dns.rdata._base64ify(self.signature)
        )

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        type_covered = dns.rdatatype.from_text(tok.get_string())
        algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
        labels = tok.get_int()
        original_ttl = tok.get_ttl()
        expiration = sigtime_to_posixtime(tok.get_string())
        inception = sigtime_to_posixtime(tok.get_string())
        key_tag = tok.get_int()
        signer = tok.get_name()
        signer = signer.choose_relativity(origin, relativize)
        chunks = []
        while 1:
            t = tok.get().unescape()
            if t.is_eol_or_eof():
                break
            if not t.is_identifier():
                raise dns.exception.SyntaxError
            chunks.append(t.value.encode())
        b64 = b''.join(chunks)
        signature = base64.b64decode(b64)
        return cls(rdclass, rdtype, type_covered, algorithm, labels,
                   original_ttl, expiration, inception, key_tag, signer,
                   signature)

    def to_wire(self, file, compress=None, origin=None):
        header = struct.pack('!HBBIIIH', self.type_covered,
                             self.algorithm, self.labels,
                             self.original_ttl, self.expiration,
                             self.inception, self.key_tag)
        file.write(header)
        self.signer.to_wire(file, None, origin)
        file.write(self.signature)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        header = struct.unpack('!HBBIIIH', wire[current: current + 18])
        current += 18
        rdlen -= 18
        (signer, cused) = dns.name.from_wire(wire[: current + rdlen], current)
        current += cused
        rdlen -= cused
        if origin is not None:
            signer = signer.relativize(origin)
        signature = wire[current: current + rdlen].unwrap()
        return cls(rdclass, rdtype, header[0], header[1], header[2],
                   header[3], header[4], header[5], header[6], signer,
                   signature)

    def choose_relativity(self, origin=None, relativize=True):
        self.signer = self.signer.choose_relativity(origin, relativize)
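
Illustrative sketch, not part of the diff: the sigtime helpers above convert the RRSIG presentation timestamps (YYYYMMDDHHMMSS, always UTC) to and from POSIX time via calendar.timegm and time.gmtime. The same round trip, restated standalone with a generic exception in place of the module's BadSigTime:

# Standalone illustration of the RRSIG signature-time conversion above.
import calendar
import time

def sigtime_to_posixtime(what):
    if len(what) != 14:
        raise ValueError("bad RRSIG time")   # the module raises BadSigTime here
    return calendar.timegm((int(what[0:4]), int(what[4:6]), int(what[6:8]),
                            int(what[8:10]), int(what[10:12]), int(what[12:14]),
                            0, 0, 0))

def posixtime_to_sigtime(what):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(what))

t = sigtime_to_posixtime('20170101000000')
print(t)                          # 1483228800
print(posixtime_to_sigtime(t))    # '20170101000000'
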
lib/dns/rdtypes/ANY/RT.py (Normal file, 21 lines)
@@ -0,0 +1,21 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.rdtypes.mxbase


class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX):

    """RT record"""
Some files were not shown because too many files have changed in this diff.