# Compare commits
278 commits are included in this comparison, from `006c778dca` through `1e1e3beca6`.
.gitignore (vendored; 1 line added: `*.mmdb`)

@@ -15,6 +15,7 @@
version.lock
logs/*
cache/*
*.mmdb

# HTTPS Cert/Key #
##################
API.md (337 lines changed)
@@ -37,11 +37,11 @@ Get to the chopper!

### backup_config
Create a manual backup of the `config.ini` file.


### backup_db
Create a manual backup of the `plexpy.db` file.


### delete_all_library_history
@@ -142,6 +142,10 @@ Returns:
```


### delete_temp_sessions
Flush out all of the temporary sessions in the database.


### delete_user
Delete a user from PlexPy. Also erases all history for the user.
@@ -158,17 +162,21 @@ Returns:


### docs
Return the api docs as a dict where commands are keys, docstrings are values.


### docs_md
Return the api docs formatted with markdown.


### download_log
Download the PlexPy log file.


### download_plex_log
Download the Plex log file.


### edit_library
Update a library section on PlexPy.
@@ -318,6 +326,34 @@ Returns:
```


### get_geoip_lookup
Get the geolocation info for an IP address. The GeoLite2 database must be installed.

```
Required parameters:
    ip_address

Optional parameters:
    None

Returns:
    json:
        {"continent": "North America",
         "country": "United States",
         "region": "California",
         "city": "Mountain View",
         "postal_code": "94035",
         "timezone": "America/Los_Angeles",
         "latitude": 37.386,
         "longitude": -122.0838,
         "accuracy": 1000
         }
    json:
        {"error": "The address 127.0.0.1 is not in the database."
         }
```
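
As a usage illustration (not part of this changeset), the new command can be called over the PlexPy HTTP API. The sketch below assumes the standard `/api/v2` endpoint is enabled; the host and API key are placeholders.

```
# Minimal sketch: query get_geoip_lookup through the PlexPy API.
# PLEXPY_URL and API_KEY are placeholders, not values from this changeset,
# and the /api/v2 endpoint is assumed to be enabled in the settings.
import requests

PLEXPY_URL = "http://localhost:8181"
API_KEY = "your_api_key"

def get_geoip_lookup(ip_address):
    params = {
        "apikey": API_KEY,
        "cmd": "get_geoip_lookup",
        "ip_address": ip_address,
    }
    resp = requests.get(PLEXPY_URL + "/api/v2", params=params, timeout=10)
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    print(get_geoip_lookup("8.8.8.8"))
```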


### get_history
Get the PlexPy history.

@@ -543,6 +579,33 @@ Returns:
```


### get_library
Get a library's details.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        {"child_count": null,
         "count": 887,
         "do_notify": 1,
         "do_notify_created": 1,
         "keep_history": 1,
         "library_art": "/:/resources/movie-fanart.jpg",
         "library_thumb": "/:/resources/movie.png",
         "parent_count": null,
         "section_id": 1,
         "section_name": "Movies",
         "section_type": "movie"
         }
```


### get_library_media_info
Get the data on the PlexPy media info tables.

@@ -619,6 +682,66 @@ Returns:
```


### get_library_user_stats
Get a library's user statistics.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        [{"friendly_name": "Jon Snow",
          "total_plays": 170,
          "user_id": 133788,
          "user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar"
          },
         {"platform_type": "DanyKhaleesi69",
          "total_plays": 42,
          "user_id": 8008135,
          "user_thumb": "https://plex.tv/users/568gwwoib5t98a3a/avatar"
          },
         {...},
         {...}
         ]
```


### get_library_watch_time_stats
Get a library's watch time statistics.

```
Required parameters:
    section_id (str): The id of the Plex library section

Optional parameters:
    None

Returns:
    json:
        [{"query_days": 1,
          "total_plays": 0,
          "total_time": 0
          },
         {"query_days": 7,
          "total_plays": 3,
          "total_time": 15694
          },
         {"query_days": 30,
          "total_plays": 35,
          "total_time": 63054
          },
         {"query_days": 0,
          "total_plays": 508,
          "total_time": 1183080
          }
         ]
```
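
A short consumption sketch (not part of the diff): fetching those watch-time buckets and printing one line per window. The host, API key, and section id are placeholders; the `/api/v2` endpoint, its `response`/`data` envelope, and seconds as the unit of `total_time` are assumptions.

```
# Sketch: tabulate a library's watch time statistics from the PlexPy API.
# PLEXPY_URL, API_KEY, and the section id are placeholders; the v2 response
# envelope ({"response": {"data": ...}}) and seconds for total_time are assumptions.
import requests

PLEXPY_URL = "http://localhost:8181"
API_KEY = "your_api_key"

def get_library_watch_time_stats(section_id):
    resp = requests.get(PLEXPY_URL + "/api/v2", params={
        "apikey": API_KEY,
        "cmd": "get_library_watch_time_stats",
        "section_id": section_id,
    }, timeout=10)
    resp.raise_for_status()
    return resp.json()["response"]["data"]

if __name__ == "__main__":
    for bucket in get_library_watch_time_stats("1"):
        window = bucket["query_days"] or "all"   # query_days 0 means all time
        hours = bucket["total_time"] / 3600.0
        print("last %s days: %s plays, %.1f hours" % (window, bucket["total_plays"], hours))
```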


### get_logs
Get the PlexPy logs.

@@ -636,11 +759,11 @@ Optional parameters:

Returns:
    json:
        [{"loglevel": "DEBUG",
          "msg": "Latest version is 2d10b0748c7fa2ee4cf59960c3d3fffc6aa9512b",
          "thread": "MainThread",
          "time": "2016-05-08 09:36:51 "
          },
         {...},
         {...}
         ]
@@ -653,7 +776,7 @@ Get the metadata for a media item.

```
Required parameters:
    rating_key (str): Rating key of the item
    media_info (bool): True or False whether to get media info

Optional parameters:
    None
@@ -1061,6 +1184,7 @@ Required parameters:
    count (str): Number of items to return

Optional parameters:
    start (str): The item number to start at
    section_id (str): The id of the Plex library section

Returns:
@@ -1310,6 +1434,35 @@ Returns:
```


### get_user
Get a user's details.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        {"allow_guest": 1,
         "deleted_user": 0,
         "do_notify": 1,
         "email": "Jon.Snow.1337@CastleBlack.com",
         "friendly_name": "Jon Snow",
         "is_allow_sync": 1,
         "is_home_user": 1,
         "is_restricted": 0,
         "keep_history": 1,
         "shared_libraries": ["10", "1", "4", "5", "15", "20", "2"],
         "user_id": 133788,
         "user_thumb": "https://plex.tv/users/k10w42309cynaopq/avatar",
         "username": "LordCommanderSnow"
         }
```


### get_user_ips
Get the data on PlexPy users IP table.

@@ -1357,7 +1510,7 @@ Returns:


### get_user_logins
Get the data on PlexPy user login table.

```
Required parameters:

@@ -1376,15 +1529,15 @@ Returns:
        "recordsTotal": 2344,
        "recordsFiltered": 10,
        "data":
            [{"browser": "Safari 7.0.3",
              "friendly_name": "Jon Snow",
              "host": "http://plexpy.castleblack.com",
              "ip_address": "xxx.xxx.xxx.xxx",
              "os": "Mac OS X",
              "timestamp": 1462591869,
              "user": "LordCommanderSnow",
              "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
              "user_group": "guest",
              "user_id": 133788
              },
             {...},
@@ -1414,6 +1567,66 @@ Returns:
```


### get_user_player_stats
Get a user's player statistics.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        [{"platform_type": "Chrome",
          "player_name": "Plex Web (Chrome)",
          "result_id": 1,
          "total_plays": 170
          },
         {"platform_type": "Chromecast",
          "player_name": "Chromecast",
          "result_id": 2,
          "total_plays": 42
          },
         {...},
         {...}
         ]
```


### get_user_watch_time_stats
Get a user's watch time statistics.

```
Required parameters:
    user_id (str): The id of the Plex user

Optional parameters:
    None

Returns:
    json:
        [{"query_days": 1,
          "total_plays": 0,
          "total_time": 0
          },
         {"query_days": 7,
          "total_plays": 3,
          "total_time": 15694
          },
         {"query_days": 30,
          "total_plays": 35,
          "total_time": 63054
          },
         {"query_days": 0,
          "total_plays": 508,
          "total_time": 1183080
          }
         ]
```


### get_users
Get a list of all users that have access to your server.

@@ -1496,6 +1709,37 @@ Returns:
```


### get_whois_lookup
Get the connection info for an IP address.

```
Required parameters:
    ip_address

Optional parameters:
    None

Returns:
    json:
        {"host": "google-public-dns-a.google.com",
         "nets": [{"description": "Google Inc.",
                   "address": "1600 Amphitheatre Parkway",
                   "city": "Mountain View",
                   "state": "CA",
                   "postal_code": "94043",
                   "country": "United States",
                   ...
                   },
                  {...}
                  ]
    json:
        {"host": "Not available",
         "nets": [],
         "error": "IPv4 address 127.0.0.1 is already defined as Loopback via RFC 1122, Section 3.2.1.3."
         }
```


### import_database
Import a PlexWatch or Plexivity database into PlexPy.

@@ -1513,12 +1757,35 @@ Returns:
```


### install_geoip_db
Downloads and installs the GeoLite2 database


### notify
Send a notification using PlexPy.

```
Required parameters:
    agent_id(str): The id of the notification agent to use
        9 # Boxcar2
        17 # Browser
        10 # Email
        16 # Facebook
        0 # Growl
        19 # Hipchat
        12 # IFTTT
        18 # Join
        4 # NotifyMyAndroid
        3 # Plex Home Theater
        1 # Prowl
        5 # Pushalot
        6 # Pushbullet
        7 # Pushover
        15 # Scripts
        14 # Slack
        13 # Telegram
        11 # Twitter
        2 # XBMC
    subject(str): The subject of the message
    body(str): The body of the message
@@ -1530,16 +1797,36 @@ Returns:
```

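To show how the notify command above might be driven from a script (an illustration, not part of the diff): the host and API key below are placeholders, the `/api/v2` endpoint is assumed, and `agent_id=14` corresponds to Slack in the table above.

```
# Sketch: send a notification through a configured agent via the notify command.
# PLEXPY_URL and API_KEY are placeholders; agent_id=14 is Slack per the table above.
import requests

PLEXPY_URL = "http://localhost:8181"
API_KEY = "your_api_key"

def notify(agent_id, subject, body):
    resp = requests.get(PLEXPY_URL + "/api/v2", params={
        "apikey": API_KEY,
        "cmd": "notify",
        "agent_id": agent_id,
        "subject": subject,
        "body": body,
    }, timeout=10)
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    notify(14, "PlexPy", "Test notification from the API")
```
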

### pms_image_proxy
Gets an image from the PMS and saves it to the image cache directory.

```
Required parameters:
    img (str): /library/metadata/153037/thumb/1462175060
        or
    rating_key (str): 54321

Optional parameters:
    width (str): 150
    height (str): 255
    fallback (str): "poster", "cover", "art"
    refresh (bool): True or False whether to refresh the image cache

Returns:
    None
```


### refresh_libraries_list
Refresh the PlexPy libraries list.


### refresh_users_list
Refresh the PlexPy users list.


### restart
Restart PlexPy.


### search
@@ -1617,8 +1904,12 @@ Returns:
```


### uninstall_geoip_db
Uninstalls the GeoLite2 database


### update
Check for PlexPy updates on Github.


### update_metadata_details

CHANGELOG.md (232 lines changed)
@@ -1,5 +1,237 @@
|
||||
# Changelog
|
||||
|
||||
## v1.4.23 (2017-09-30)
|
||||
|
||||
* Fix: Playstation 4 platform name.
|
||||
* Fix: PlexWatch and Plexivity import.
|
||||
* Fix: Pushbullet authorization header.
|
||||
|
||||
|
||||
## v1.4.22 (2017-08-19)
|
||||
|
||||
* Fix: Cleaning up of old config backups.
|
||||
* Fix: Temporary fix for incorrect source media info.
|
||||
|
||||
|
||||
## v1.4.21 (2017-07-01)
|
||||
|
||||
* New: Updated donation methods.
|
||||
|
||||
|
||||
## v1.4.20 (2017-06-24)
|
||||
|
||||
* New: Added platform image for the PlexTogether player.
|
||||
* Fix: Corrected math used to calculate human duration. (Thanks @senepa)
|
||||
* Fix: Sorting of 4k in media info tables.
|
||||
* Fix: Update file sizes when refreshing media info tables.
|
||||
* Fix: Support a custom port for Mattermost (Slack) notifications.
|
||||
|
||||
|
||||
## v1.4.19 (2017-05-31)
|
||||
|
||||
* Fix: Video resolution not showing up for transcoded streams on PMS 1.7.x.
|
||||
|
||||
|
||||
## v1.4.18 (2017-04-22)
|
||||
|
||||
* New: Added some new Arnold quotes. (Thanks @senepa)
|
||||
* Fix: Text wrapping in datatable footers.
|
||||
* Fix: API command get_apikey. (Thanks @Hellowlol)
|
||||
|
||||
|
||||
## v1.4.17 (2017-03-04)
|
||||
|
||||
* New: Configurable month range for the Plays by month graph. (Thanks @Pbaboe)
|
||||
* New: Option to change the week to start on Monday for the Plays by day of week graph. (Thanks @Pbaboe)
|
||||
* Fix: Invalid iOS icon file paths. (Thanks @demonbane)
|
||||
* Fix: Plex Web 3.0 URLs on info pages and notifications.
|
||||
* Fix: Update bitcoin donation link to Coinbase.
|
||||
* Fix: Update init scripts. (Thanks @ampsonic)
|
||||
|
||||
|
||||
## v1.4.16 (2016-11-25)
|
||||
|
||||
* Fix: Websocket for new json response on PMS 1.3.0.
|
||||
* Fix: Update stream and transcoder tooltip percent.
|
||||
* Fix: Typo in the edit user modal.
|
||||
|
||||
|
||||
## v1.4.15 (2016-11-11)
|
||||
|
||||
* New: Add stream and transcoder progress percent to the current activity tooltip.
|
||||
* Fix: Refreshing of images in the cache when authentication is disabled.
|
||||
* Fix: Plex.tv authentication with special characters in the username or password.
|
||||
* Fix: Line breaks in the info page summaries.
|
||||
* Fix: Redirect to the proper http root when restarting.
|
||||
* Fix: API result type and responses showing incorrectly. (Thanks @Hellowlol)
|
||||
* Change: Use https URL for app.plex.tv.
|
||||
* Change: Show API traceback errors in the browser with debugging enabled. (Thanks @Hellowlol)
|
||||
* Change: Increase table width on mobile devices and max width set to 1750px. (Thanks @XusBadia)
|
||||
|
||||
|
||||
## v1.4.14 (2016-10-12)
|
||||
|
||||
* Fix: History logging locking up if media is removed from Plex before PlexPy can save the session.
|
||||
* Fix: Unable to save API key in the settings.
|
||||
* Fix: Some typos in the settings. (Thanks @Leafar3456)
|
||||
* Change: Disable script timeout by setting timeout to 0 seconds.
|
||||
|
||||
|
||||
## v1.4.13 (2016-10-08)
|
||||
|
||||
* New: Option to set the number of days to keep PlexPy backups.
|
||||
* New: Option to add a supplementary url to Pushover notifications.
|
||||
* New: Option to set a timeout duration for script notifications.
|
||||
* New: Added flush temporary sessions button to extra settings for emergency use.
|
||||
* New: Added pms_image_proxy to the API.
|
||||
* Fix: Insanely long play durations being recorded when connection to the Plex server is lost.
|
||||
* Fix: Script notification output not being sent to the logger.
|
||||
* Fix: New libraries not being added to homepage automatically.
|
||||
* Fix: Success message shown incorrectly when sending a test notification.
|
||||
* Fix: PlexPy log level filter not working.
|
||||
* Fix: Admin username not shown in login logs.
|
||||
* Fix: FeatHub link in readme document.
|
||||
* Change: Posters disabled by default for all notification agents.
|
||||
* Change: Disable manual changing of the PlexPy API key.
|
||||
* Change: Force refresh the Plex.tv token when fetching a new token.
|
||||
* Change: Script notifications run in a new thread with the timeout setting.
|
||||
* Change: Watched percent moved to general settings.
|
||||
* Change: Use human readable file sizes in the media info tables. (Thanks @logaritmisk)
|
||||
* Change: Update pytz library.
|
||||
|
||||
|
||||
## v1.4.12 (2016-09-18)
|
||||
|
||||
* Fix: PMS update check not working for MacOSX.
|
||||
* Fix: Square covers for music stats on homepage.
|
||||
* Fix: Card width on the homepage for iPhone 6/7 Plus. (Thanks @XusBadia)
|
||||
* Fix: Check for running PID when starting PlexPy. (Thanks @spolyack)
|
||||
* Fix: FreeBSD service script not stopping PlexPy properly.
|
||||
* Fix: Some web UI cleanup.
|
||||
* Change: GitHub repository moved.
|
||||
|
||||
|
||||
## v1.4.11 (2016-09-02)
|
||||
|
||||
* Fix: PlexWatch and Plexivity import errors.
|
||||
* Fix: Searching in history datatables.
|
||||
* Fix: Notifications not sending for Local user.
|
||||
|
||||
|
||||
## v1.4.10 (2016-08-15)
|
||||
|
||||
* Fix: Missing python ipaddress module preventing PlexPy from starting.
|
||||
|
||||
|
||||
## v1.4.9 (2016-08-14)
|
||||
|
||||
* New: Option to include current activity in the history tables.
|
||||
* New: ISP lookup info in the IP address modal.
|
||||
* New: Option to disable web page previews for Telegram notifications.
|
||||
* Fix: Send correct JSON header for Slack/Mattermost notifications.
|
||||
* Fix: Twitter and Facebook test notifications incorrectly showing as "failed".
|
||||
* Fix: Current activity progress bars extending past 100%.
|
||||
* Fix: Typo in the setup wizard. (Thanks @wopian)
|
||||
* Fix: Update PMS server version before checking for a new update.
|
||||
* Change: Compare distro and build when checking for server updates.
|
||||
* Change: Nicer y-axis intervals when viewing "Play Duration" graphs.
|
||||
|
||||
|
||||
## v1.4.8 (2016-07-16)
|
||||
|
||||
* New: Setting to specify PlexPy backup interval.
|
||||
* Fix: User Concurrent Streams Notifications by IP Address checkbox not working.
|
||||
* Fix: Substitute {update_version} in fallback PMS update notification text.
|
||||
* Fix: Check version for automatic IP logging setting.
|
||||
* Fix: Use library refresh interval.
|
||||
|
||||
|
||||
## v1.4.7 (2016-07-14)
|
||||
|
||||
* New: Use MaxMind GeoLite2 for IP address lookup.
|
||||
* Note: The GeoLite2 database must be installed from the settings page.
|
||||
* New: Check for Plex updates using plex.tv downloads instead of the server API.
|
||||
* Note: Check for Plex updates has been disabled and must be re-enabled in the settings.
|
||||
* New: More notification options for Plex updates.
|
||||
* New: Notifications for concurrent streams by a single user.
|
||||
* New: Notifications for user streaming from a new device.
|
||||
* New: HipChat notification agent. (Thanks @aboron)
|
||||
* Fix: Username showing as blank when friendly name is blank.
|
||||
* Fix: Direct stream count wrong in the current activity header.
|
||||
* Fix: Current activity reporting direct stream when reducing the stream quality switches to transcoding.
|
||||
* Fix: Apostrophe in an Arnold quote causing the shutdown/restart page to crash.
|
||||
* Fix: Disable refreshing posters in guest mode.
|
||||
* Fix: PlexWatch/Plexivity import unable to select the "grouped" database table.
|
||||
* Change: Updated Facebook notification instructions.
|
||||
* Change: Subject line optional for Join notifications.
|
||||
* Change: Line break between subject and body text instead of a colon for Facebook, Slack, Twitter, and Telegram.
|
||||
* Change: Allow Mattermost notifications using the Slack config.
|
||||
* Change: Better formatting for Slack poster notifications.
|
||||
* Change: Telegram only notifies once instead of twice when posters are enabled.
|
||||
* Change: Host Open Sans font locally instead of querying Google Fonts.
|
||||
|
||||
|
||||
## v1.4.6 (2016-06-11)
|
||||
|
||||
* New: Added User and Library statistics to the API.
|
||||
* New: Ability to refresh individual poster images without clearing the entire cache. (Thanks @Hellowlol)
|
||||
* New: Added {added_date}, {updated_date}, and {last_viewed_date} to metadata notification options.
|
||||
* New: Log level filter for Plex logs. (Thanks @sanderploegsma)
|
||||
* New: Log level filter for PlexPy logs.
|
||||
* New: Button to download Plex logs directly from the web interface.
|
||||
* New: Advanced setting in the config file to change the number of Plex log lines retrieved.
|
||||
* Fix: FreeBSD and FreeNAS init scripts to reflect the path in the installation guide. (Thanks @nortron)
|
||||
* Fix: Monitoring crashing when failed to retrieve current activity.
|
||||
|
||||
|
||||
## v1.4.5 (2016-05-25)
|
||||
|
||||
* Fix: PlexPy unable to start if failed to get shared libraries for a user.
|
||||
* Fix: Matching port number when retrieving the PMS url.
|
||||
* Fix: Extract mapped IPv4 address in Plexivity import.
|
||||
* Change: Revert back to internal url when retrieving PMS images.
|
||||
|
||||
|
||||
## v1.4.4 (2016-05-24)
|
||||
|
||||
* Fix: Image queries crashing the PMS when playing clips from channels.
|
||||
* Fix: Plexivity import if IP address is missing.
|
||||
* Fix: Tooltips shown behind the datatable headers.
|
||||
* Fix: Current activity instances rendered in a random order causing them to jump around.
|
||||
|
||||
|
||||
## v1.4.3 (2016-05-22)
|
||||
|
||||
* Fix: PlexPy not starting without any authentication method.
|
||||
|
||||
|
||||
## v1.4.2 (2016-05-22)
|
||||
|
||||
* New: Option to use HTTP basic authentication instead of the HTML login form.
|
||||
* Fix: Unable to save settings when enabling the HTTP proxy setting.
|
||||
* Change: Match the PMS port when retrieving the PMS url.
|
||||
|
||||
|
||||
## v1.4.1 (2016-05-20)
|
||||
|
||||
* New: HTTP Proxy checkbox in the settings. Enable this if using an SSL enabled reverse proxy in front of PlexPy.
|
||||
* Fix: Check for blank username/password on login.
|
||||
* Fix: Persist current activity artwork blur across refreshes when transcoding details are visible.
|
||||
* Fix: Send notifications to multiple XBMC/Plex Home Theater devices.
|
||||
* Fix: Reset PMS identifier when clicking verify server button in settings.
|
||||
* Fix: Crash when trying to group current activity session in database.
|
||||
* Fix: Check current activity returns sessions when refreshing.
|
||||
* Fix: Logs sorted out of order.
|
||||
* Fix: Resolution reported incorrectly in the stream info modal.
|
||||
* Fix: PlexPy crashing when hashing password in the config file.
|
||||
* Fix: CherryPy doubling the port number when accessing PlexPy locally with http_proxy enabled.
|
||||
* Change: Sort by most recent for ties in watch statistics.
|
||||
* Change: Refresh Join devices when changing the API key.
|
||||
* Change: Format the Join device IDs.
|
||||
* Change: Join notifications now sent with Python Requests module.
|
||||
* Change: Add paging for recently added in the API.
|
||||
|
||||
|
||||
## v1.4.0 (2016-05-15)
|
||||
|
||||
* New: An HTML form login page with sessions support.
|
||||
|
@@ -9,14 +9,14 @@ In case you read this because you are posting an issue, please take a minute and
|
||||
- Turning your device off and on again.
|
||||
- Analyzing your logs, you just might find the solution yourself!
|
||||
- Using the **search** function to see if this issue has already been reported/solved.
|
||||
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
|
||||
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
|
||||
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
|
||||
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
|
||||
- Checking the [Wiki](https://github.com/JonnyWong16/plexpy/wiki) for
|
||||
[ [Installation] ](https://github.com/JonnyWong16/plexpy/wiki/Installation) and
|
||||
[ [FAQs] ](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
|
||||
- For basic questions try asking on [Gitter](https://gitter.im/plexpy/general) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
|
||||
|
||||
##### If nothing has worked:
|
||||
|
||||
1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
|
||||
1. Open a new issue on the GitHub [issue tracker](http://github.com/JonnyWong16/plexpy/issues).
|
||||
2. Provide a clear title to easily help identify your problem.
|
||||
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
|
||||
4. Make sure you provide the following information:
|
||||
@@ -35,7 +35,7 @@ In case you read this because you are posting an issue, please take a minute and
|
||||
|
||||
## Feature Requests
|
||||
|
||||
Feature requests are handled on [FeatHub](http://feathub.com/drzoidberg33/plexpy).
|
||||
Feature requests are handled on [FeatHub](http://feathub.com/JonnyWong16/plexpy).
|
||||
|
||||
1. Search the existing requests to see if your suggestion has already been submitted.
|
||||
2. If a similar request exists, give it a thumbs up (+1), or add additional comments to the request.
|
||||
|
@@ -1,14 +1,14 @@
|
||||
<!---
|
||||
Reporting Issues:
|
||||
* To ensure that a develpoer has enough information to work with please include all of the information below.
|
||||
* To ensure that a developer has enough information to work with please include all of the information below.
|
||||
Please provide as much detail as possible. Screenshots can be very useful to see the problem.
|
||||
* Use proper markdown syntax to structure your post (i.e. code/log in code blocks).
|
||||
See: https://help.github.com/articles/basic-writing-and-formatting-syntax/
|
||||
* Iclude a link to your **FULL** log file that has the error(not just a few lines!).
|
||||
* Include a link to your **FULL** log file that has the error(not just a few lines!).
|
||||
Please use [Gist](http://gist.github.com) or [Pastebin](http://pastebin.com/).
|
||||
|
||||
Feature Requests:
|
||||
* Feature requests are handled on FeatHub: http://feathub.com/drzoidberg33/plexpy
|
||||
* Feature requests are handled on FeatHub: http://feathub.com/JonnyWong16/plexpy
|
||||
* Do not post them on the GitHub issues tracker.
|
||||
-->
|
||||
|
||||
|
PlexPy.py (18 lines changed)

@@ -122,8 +122,21 @@ def main():
     # If the pidfile already exists, plexpy may still be running, so
     # exit
     if os.path.exists(plexpy.PIDFILE):
-        raise SystemExit("PID file '%s' already exists. Exiting." %
-                         plexpy.PIDFILE)
+        try:
+            with open(plexpy.PIDFILE, 'r') as fp:
+                pid = int(fp.read())
+            os.kill(pid, 0)
+        except IOError as e:
+            raise SystemExit("Unable to read PID file: %s", e)
+        except OSError:
+            logger.warn("PID file '%s' already exists, but PID %d is " \
+                        "not running. Ignoring PID file." %
+                        (plexpy.PIDFILE, pid))
+        else:
+            # The pidfile exists and points to a live PID. plexpy may
+            # still be running, so exit.
+            raise SystemExit("PID file '%s' already exists. Exiting." %
+                             plexpy.PIDFILE)

     # The pidfile is only useful in daemon mode, make sure we can write the
     # file properly
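
For reference, the same liveness probe as a small standalone sketch (the helper name and structure are illustrative, not from this changeset): `os.kill(pid, 0)` delivers no signal but fails for a PID that no longer exists, which is how a stale pidfile is distinguished from a running instance.

```
# Standalone sketch of the stale-pidfile check added above. The function name
# and error handling are illustrative; signal 0 only probes for existence.
import os

def pidfile_is_stale(pidfile):
    """Return True if pidfile names a PID that is no longer running."""
    try:
        with open(pidfile, 'r') as fp:
            pid = int(fp.read().strip())
    except (IOError, ValueError) as e:
        raise SystemExit("Unable to read PID file: %s" % e)
    try:
        os.kill(pid, 0)   # no signal is sent; raises OSError if pid is gone
    except OSError:
        return True       # note: a permission error for a foreign PID also lands here
    return False
```
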
@@ -214,6 +227,7 @@ def main():
                   'https_key': plexpy.CONFIG.HTTPS_KEY,
                   'http_username': plexpy.CONFIG.HTTP_USERNAME,
                   'http_password': plexpy.CONFIG.HTTP_PASSWORD,
+                  'http_basic_auth': plexpy.CONFIG.HTTP_BASIC_AUTH
                   }
     webstart.initialize(web_config)

README.md (30 lines changed)
@@ -1,13 +1,13 @@
|
||||
# PlexPy
|
||||
|
||||
[](https://gitter.im/drzoidberg33/plexpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://discord.gg/36ggawe)
|
||||
[](https://gitter.im/plexpy/general)
|
||||
[](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program)
|
||||
|
||||
A python based web application for monitoring, analytics and notifications for Plex Media Server (www.plex.tv).
|
||||
A python based web application for monitoring, analytics and notifications for [Plex Media Server](https://plex.tv).
|
||||
|
||||
This project is based on code from [Headphones](https://github.com/rembo10/headphones) and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
|
||||
|
||||
* PlexPy [forum thread](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program)
|
||||
|
||||
## Features
|
||||
|
||||
* Responsive web design viewable on desktop, tablet and mobile web browsers.
|
||||
@@ -25,10 +25,16 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
|
||||
* Full sync list data on all users syncing items from your library.
|
||||
* And many more!!
|
||||
|
||||
## Preview
|
||||
|
||||
* [Full preview gallery on Imgur](https://imgur.com/a/RwQPM)
|
||||
|
||||

|
||||
|
||||
## Installation and Support
|
||||
|
||||
* [Installation Guides](https://github.com/drzoidberg33/plexpy/wiki/Installation) shows you how to install PlexPy.
|
||||
* [FAQs](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)) in the wiki can help you with common problems.
|
||||
* [Installation Guides](https://github.com/JonnyWong16/plexpy/wiki/Installation) shows you how to install PlexPy.
|
||||
* [FAQs](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)) in the wiki can help you with common problems.
|
||||
|
||||
**Support** the project by implementing new features, solving support tickets and provide bug fixes.
|
||||
|
||||
@@ -40,14 +46,14 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
|
||||
- Turning your device off and on again.
|
||||
- Analyzing your logs, you just might find the solution yourself!
|
||||
- Using the **search** function to see if this issue has already been reported/solved.
|
||||
- Checking the [Wiki](https://github.com/drzoidberg33/plexpy/wiki) for
|
||||
[ [Installation] ](https://github.com/drzoidberg33/plexpy/wiki/Installation) and
|
||||
[ [FAQs] ](https://github.com/drzoidberg33/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
|
||||
- For basic questions try asking on [Gitter](https://gitter.im/drzoidberg33/plexpy) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
|
||||
- Checking the [Wiki](https://github.com/JonnyWong16/plexpy/wiki) for
|
||||
[ [Installation] ](https://github.com/JonnyWong16/plexpy/wiki/Installation) and
|
||||
[ [FAQs] ](https://github.com/JonnyWong16/plexpy/wiki/Frequently-Asked-Questions-(FAQ)).
|
||||
- For basic questions try asking on [Gitter](https://gitter.im/plexpy/general) or the [Plex Forums](https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program) first before opening an issue.
|
||||
|
||||
##### If nothing has worked:
|
||||
|
||||
1. Open a new issue on the GitHub [issue tracker](http://github.com/drzoidberg33/plexpy/issues).
|
||||
1. Open a new issue on the GitHub [issue tracker](http://github.com/JonnyWong16/plexpy/issues).
|
||||
2. Provide a clear title to easily help identify your problem.
|
||||
3. Use proper [markdown syntax](https://help.github.com/articles/github-flavored-markdown) to structure your post (i.e. code/log in code blocks).
|
||||
4. Make sure you provide the following information:
|
||||
@@ -66,7 +72,7 @@ This project is based on code from [Headphones](https://github.com/rembo10/headp
|
||||
|
||||
## Feature Requests
|
||||
|
||||
Feature requests are handled on [FeatHub](http://feathub.com/drzoidberg33/plexpy).
|
||||
Feature requests are handled on [FeatHub](http://feathub.com/JonnyWong16/plexpy).
|
||||
|
||||
1. Search the existing requests to see if your suggestion has already been submitted.
|
||||
2. If a similar request exists, give it a thumbs up (+1), or add additional comments to the request.
|
||||
|
@@ -30,7 +30,7 @@
|
||||
<div class="col-xs-4">
|
||||
<select id="table_name" class="form-control" name="table_name">
|
||||
<option value="processed">processed</option>
|
||||
<option value="processed">grouped</option>
|
||||
<option value="grouped">grouped</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -15,7 +15,7 @@
|
||||
<link href="${http_root}css/bootstrap3/bootstrap.css" rel="stylesheet">
|
||||
<link href="${http_root}css/pnotify.custom.min.css" rel="stylesheet" />
|
||||
<link href="${http_root}css/plexpy.css" rel="stylesheet">
|
||||
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,600" rel="stylesheet" type="text/css">
|
||||
<link href="${http_root}css/opensans.min.css" rel="stylesheet">
|
||||
<link href="${http_root}css/font-awesome.min.css" rel="stylesheet">
|
||||
${next.headIncludes()}
|
||||
|
||||
@@ -66,67 +66,67 @@
|
||||
|
||||
<!-- STARTUP IMAGES -->
|
||||
<!-- iPad retina portrait startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-Portrait@2x~ipad.png"
|
||||
<link href="${http_root}images/res/ios/Default-Portrait@2x~ipad.png"
|
||||
media="(device-width: 768px) and (device-height: 1024px)
|
||||
and (-webkit-device-pixel-ratio: 2)
|
||||
and (orientation: portrait)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPad retina landscape startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-Landscape@2x~ipad.png"
|
||||
<link href="${http_root}images/res/ios/Default-Landscape@2x~ipad.png"
|
||||
media="(device-width: 768px) and (device-height: 1024px)
|
||||
and (-webkit-device-pixel-ratio: 2)
|
||||
and (orientation: landscape)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPad non-retina portrait startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-Portrait~ipad.png"
|
||||
<link href="${http_root}images/res/ios/Default-Portrait~ipad.png"
|
||||
media="(device-width: 768px) and (device-height: 1024px)
|
||||
and (-webkit-device-pixel-ratio: 1)
|
||||
and (orientation: portrait)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPad non-retina landscape startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-Landscape~ipad.png"
|
||||
<link href="${http_root}images/res/ios/Default-Landscape~ipad.png"
|
||||
media="(device-width: 768px) and (device-height: 1024px)
|
||||
and (-webkit-device-pixel-ratio: 1)
|
||||
and (orientation: landscape)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone 6 Plus portrait startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-736h.png"
|
||||
<link href="${http_root}images/res/ios/Default-736h.png"
|
||||
media="(device-width: 414px) and (device-height: 736px)
|
||||
and (-webkit-device-pixel-ratio: 3)
|
||||
and (orientation: portrait)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone 6 Plus landscape startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-Landscape-736h.png"
|
||||
<link href="${http_root}images/res/ios/Default-Landscape-736h.png"
|
||||
media="(device-width: 414px) and (device-height: 736px)
|
||||
and (-webkit-device-pixel-ratio: 3)
|
||||
and (orientation: landscape)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone 6 startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-667h.png"
|
||||
<link href="${http_root}images/res/ios/Default-667h.png"
|
||||
media="(device-width: 375px) and (device-height: 667px)
|
||||
and (-webkit-device-pixel-ratio: 2)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone 5 startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default-568h@2x~iphone5.jpg"
|
||||
<link href="${http_root}images/res/ios/Default-568h@2x~iphone5.jpg"
|
||||
media="(device-width: 320px) and (device-height: 568px)
|
||||
and (-webkit-device-pixel-ratio: 2)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone < 5 retina startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default@2x~iphone.png"
|
||||
<link href="${http_root}images/res/ios/Default@2x~iphone.png"
|
||||
media="(device-width: 320px) and (device-height: 480px)
|
||||
and (-webkit-device-pixel-ratio: 2)"
|
||||
rel="apple-touch-startup-image">
|
||||
|
||||
<!-- iPhone < 5 non-retina startup image -->
|
||||
<link href="${http_root}images/res/screen/ios/Default~iphone.png"
|
||||
<link href="${http_root}images/res/ios/Default~iphone.png"
|
||||
media="(device-width: 320px) and (device-height: 480px)
|
||||
and (-webkit-device-pixel-ratio: 1)"
|
||||
rel="apple-touch-startup-image">
|
||||
@@ -170,7 +170,7 @@
|
||||
<form action="search" method="post" class="form" id="search_form">
|
||||
<div class="input-group">
|
||||
<span class="input-textbox">
|
||||
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search..."/>
|
||||
<input type="text" class="form-control" name="query" id="query" aria-label="Search" placeholder="Search Plex library..."/>
|
||||
</span>
|
||||
<span class="input-group-btn">
|
||||
<button class="btn btn-dark btn-inactive" type="submit" id="search_button"><i class="fa fa-search"></i></button>
|
||||
@@ -220,9 +220,10 @@
|
||||
<li><a href="settings"><i class="fa fa-fw fa-cogs"></i> Settings</a></li>
|
||||
<li role="separator" class="divider"></li>
|
||||
<li><a href="logs"><i class="fa fa-fw fa-list-alt"></i> View Logs</a></li>
|
||||
<li><a href="${anon_url('https://github.com/%s/plexpy/wiki/Frequently-Asked-Questions-(FAQ)' % plexpy.CONFIG.GIT_USER)}" target="_blank"><i class="fa fa-fw fa-question-circle"></i> FAQ</a></li>
|
||||
<li><a href="settings?support=true"><i class="fa fa-fw fa-comment"></i> Support</a></li>
|
||||
<li role="separator" class="divider"></li>
|
||||
<li><a href="${anon_url('https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=DG783BMSCU3V4')}" target="_blank"><i class="fa fa-fw fa-paypal"></i> Paypal</a></li>
|
||||
<li><a href="${anon_url('http://swiftpanda16.tip.me/')}" target="_blank"><i class="fa fa-fw fa-btc"></i> Bitcoin</a></li>
|
||||
<li><a href="#" data-target="#donate-modal" data-toggle="modal"><i class="fa fa-fw fa-heart"></i> Donate</a></li>
|
||||
<li role="separator" class="divider"></li>
|
||||
% if plexpy.CONFIG.CHECK_GITHUB:
|
||||
<li><a href="#" id="nav-update"><i class="fa fa-fw fa-arrow-circle-up"></i> Check for Updates</a></li>
|
||||
@@ -303,6 +304,64 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% else:
|
||||
<div id="donate-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="crypto-donate-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">PlexPy Donation</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="row">
|
||||
<div class="col-md-12" style="text-align: center;">
|
||||
<h4>
|
||||
<strong>Thank you for supporting PlexPy!</strong>
|
||||
</h4>
|
||||
<p>
|
||||
Please select a donation method.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<ul id="donation_type" class="nav nav-pills" role="tablist" style="display: flex; justify-content: center; margin: 10px 0;">
|
||||
<li class="active"><a href="#paypal-donation" role="tab" data-toggle="tab">PayPal</a></li>
|
||||
<li><a href="#flattr-donation" role="tab" data-toggle="tab">Flattr</a></li>
|
||||
<li><a href="#crypto-donation" role="tab" data-toggle="tab" class="crypto-donation" data-coin="bitcoin" data-name="Bitcoin" data-address="3FdfJAyNWU15Sf11U9FTgPHuP1hPz32eEN">Bitcoin</a></li>
|
||||
<li><a href="#crypto-donation" role="tab" data-toggle="tab" class="crypto-donation" data-coin="bitcoincash" data-name="Bitcoin Cash" data-address="1H2atabxAQGaFAWYQEiLkXKSnK9CZZvt2n">Bitcoin Cash</a></li>
|
||||
<li><a href="#crypto-donation" role="tab" data-toggle="tab" class="crypto-donation" data-coin="ethereum" data-name="Ethereum" data-address="0x77ae4c2b8de1a1ccfa93553db39971da58c873d3">Ethereum</a></li>
|
||||
<li><a href="#crypto-donation" role="tab" data-toggle="tab" class="crypto-donation" data-coin="litecoin" data-name="Litecoin" data-address="LWpPmUqQYHBhMV83XSCsHzPmKLhJt6r57J">Litecoin</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
<div role="tabpanel" class="tab-pane active" id="paypal-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to PayPal.
|
||||
</p>
|
||||
<a href="${anon_url('https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=6XPPKTDSX9QFL&lc=US&item_name=PlexPy¤cy_code=USD&bn=PP%2dDonationsBF%3abtn_donate_LG%2egif%3aNonHosted')}" target="_blank">
|
||||
<img src="images/gold-rect-paypal-34px.png" alt="PayPal">
|
||||
</a>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="flattr-donation" style="text-align: center">
|
||||
<p>
|
||||
Click the button below to continue to Flattr.
|
||||
</p>
|
||||
<a href="${anon_url('https://flattr.com/submit/auto?user_id=JonnyWong16&url=https://github.com/JonnyWong16/plexpy&title=PlexPy&language=en_GB&tags=github&category=software')}" target="_blank">
|
||||
<img src="images/flattr-badge-large.png" alt="Flattr">
|
||||
</a>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="crypto-donation">
|
||||
<label>QR Code</label>
|
||||
<pre id="crypto_qr_code" style="text-align: center"></pre>
|
||||
<label><span id="crypto_type_label"></span> Address</label>
|
||||
<pre id="crypto_address" style="text-align: center"></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<input type="button" class="btn btn-bright" data-dismiss="modal" value="Close">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% endif
|
||||
|
||||
${next.headerIncludes()}
|
||||
@@ -315,8 +374,10 @@ ${next.headerIncludes()}
|
||||
<script src="${http_root}js/bootstrap-hover-dropdown.min.js"></script>
|
||||
<script src="${http_root}js/pnotify.custom.min.js"></script>
|
||||
<script src="${http_root}js/script.js"></script>
|
||||
<script src="${http_root}js/jquery.qrcode.min.js"></script>
|
||||
% if _session['user_group'] == 'admin' and plexpy.CONFIG.BROWSER_ENABLED:
|
||||
<script src="${http_root}js/ajaxNotifications.js"></script>
|
||||
% else:
|
||||
% endif
|
||||
<script>
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -352,6 +413,17 @@ ${next.headerIncludes()}
|
||||
$(this).html('<i class="fa fa-spin fa-refresh"></i> Checking');
|
||||
window.location.href = "checkGithub";
|
||||
});
|
||||
|
||||
$('#donation_type a.crypto-donation').on('shown.bs.tab', function () {
|
||||
var crypto_coin = $(this).data('coin');
|
||||
var crypto_name = $(this).data('name');
|
||||
var crypto_address = $(this).data('address')
|
||||
$('#crypto_qr_code').empty().qrcode({
|
||||
text: crypto_coin + ":" + crypto_address
|
||||
});
|
||||
$('#crypto_type_label').html(crypto_name);
|
||||
$('#crypto_address').html(crypto_address);
|
||||
});
|
||||
% endif
|
||||
|
||||
$('.dropdown-toggle').click(function (e) {
|
||||
|
data/interfaces/default/configuration_table.html (new file, 172 lines)

@@ -0,0 +1,172 @@
|
||||
<%doc>
|
||||
USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE
|
||||
|
||||
For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/
|
||||
|
||||
Filename: configuration_table.html
|
||||
Version: 0.1
|
||||
|
||||
DOCUMENTATION :: END
|
||||
</%doc>
|
||||
|
||||
<%!
|
||||
import os
|
||||
import sys
|
||||
import plexpy
|
||||
from plexpy import common, logger
|
||||
from plexpy.helpers import anon_url
|
||||
%>
|
||||
|
||||
<table class="config-info-table small-muted">
|
||||
<tbody>
|
||||
% if plexpy.CURRENT_VERSION:
|
||||
<tr>
|
||||
<td>Git Branch:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/tree/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_BRANCH))}">${plexpy.CONFIG.GIT_BRANCH}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Git Commit Hash:</td>
|
||||
<td><a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/commit/%s' % (plexpy.CONFIG.GIT_USER, plexpy.CONFIG.GIT_BRANCH))}">${plexpy.CURRENT_VERSION}</a></td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Configuration File:</td>
|
||||
<td>${plexpy.CONFIG_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database File:</td>
|
||||
<td>${plexpy.DB_FILE}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Log File:</td>
|
||||
<td><a class="no-highlight" href="logFile" target="_blank">${os.path.join(plexpy.CONFIG.LOG_DIR, logger.FILENAME)}</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Backup Directory:</td>
|
||||
<td>${plexpy.CONFIG.BACKUP_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Cache Directory:</td>
|
||||
<td>${plexpy.CONFIG.CACHE_DIR}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>GeoLite2 Database:</td>
|
||||
% if plexpy.CONFIG.GEOIP_DB:
|
||||
<td>${plexpy.CONFIG.GEOIP_DB} | <a class="no-highlight" href="#" id="reinstall_geoip_db">Reinstall / Update</a> | <a class="no-highlight" href="#" id="uninstall_geoip_db">Uninstall</a></td>
|
||||
% else:
|
||||
<td><a class="no-highlight" href="#" id="install_geoip_db">Click here to install the GeoLite2 database.</a></td>
|
||||
% endif
|
||||
</tr>
|
||||
% if plexpy.ARGS:
|
||||
<tr>
|
||||
<td>Arguments:</td>
|
||||
<td>${plexpy.ARGS}</td>
|
||||
</tr>
|
||||
% endif
|
||||
<tr>
|
||||
<td>Platform:</td>
|
||||
<td>${common.PLATFORM} ${common.PLATFORM_VERSION}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Python Version:</td>
|
||||
<td>${sys.version}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="top-line">Resources:</td>
|
||||
<td class="top-line">
|
||||
<a id="source-link" class="no-highlight" href="${anon_url('https://github.com/%s/plexpy' % plexpy.CONFIG.GIT_USER)}" target="_blank">GitHub Source</a> |
|
||||
<a class="no-highlight guidelines-modal-link" href="${anon_url('https://github.com/%s/plexpy/issues' % plexpy.CONFIG.GIT_USER)}" data-id="issue">GitHub Issues</a> |
|
||||
<a class="no-highlight guidelines-modal-link" href="${anon_url('http://feathub.com/%s/plexpy' % plexpy.CONFIG.GIT_USER)}" data-id="feature request">FeatHub Feature Requests</a> |
|
||||
<a class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/wiki' % plexpy.CONFIG.GIT_USER)}" target="_blank">PlexPy Wiki</a> |
|
||||
<a id="faq-source-link" class="no-highlight" href="${anon_url('https://github.com/%s/plexpy/wiki/Frequently-Asked-Questions-(FAQ)' % plexpy.CONFIG.GIT_USER)}" target="_blank">PlexPy FAQ</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Support:</td>
|
||||
<td>
|
||||
<a class="no-highlight support-modal-link" href="${anon_url('https://forums.plex.tv/discussion/169591/plexpy-another-plex-monitoring-program')}" target="_blank">Plex Forums</a> |
|
||||
<a class="no-highlight support-modal-link" href="${anon_url('https://gitter.im/plexpy/general')}" target="_blank">PlexPy Gitter Chat</a> |
|
||||
<a id="best-support-link" class="no-highlight support-modal-link" href="${anon_url('https://discord.gg/011TFFWSuNFI02EKr')}" target="_blank">/r/Plex Discord Server</a> |
|
||||
<a class="no-highlight support-modal-link" href="${anon_url('https://discord.gg/36ggawe')}" target="_blank">PlexPy Discord Server</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div id="guidelines-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="guidelines-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Guidelines</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
|
||||
<strong>Please read the <a href="#" target="_blank" id="guidelines-link">guidelines</a> in the README document <br />before submitting a new <span id="guidelines-type"></span>!</strong>
|
||||
<br /><br />
|
||||
Your post may be removed for failure to follow the guidelines.
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<a href="#" target="_blank" id="guidelines-continue" class="btn btn-bright">Continue</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="support-modal" class="modal fade" tabindex="-1" role="dialog" aria-labelledby="support-modal">
|
||||
<div class="modal-dialog" role="document">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title">Support</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
|
||||
<strong>Please read the <a href="#" target="_blank" id="faq-link">FAQ</a> before asking for help!</strong>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<a href="#" target="_blank" id="support-continue" class="btn btn-bright">Continue</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$("#install_geoip_db, #reinstall_geoip_db").click(function () {
|
||||
var msg = 'Are you sure you want to install the GeoLite2 database?<br /><br />' +
|
||||
'The database is used to lookup IP address geolocation info.<br />' +
|
||||
'The database will be downloaded from <a href="${anon_url("https://dev.maxmind.com/geoip/geoip2/geolite2/")}" target="_blank">MaxMind</a>, <br />' +
|
||||
'and requires <strong>100MB</strong> of free space to install in your PlexPy directory.<br />'
|
||||
var url = 'install_geoip_db';
|
||||
confirmAjaxCall(url, msg, 'Installing GeoLite2 database.', getConfigurationTable);
|
||||
});
|
||||
|
||||
$("#uninstall_geoip_db").click(function () {
|
||||
var msg = 'Are you sure you want to uninstall the GeoLite2 database?<br /><br />' +
|
||||
'You will not be able to lookup IP address geolocation info.';
|
||||
var url = 'uninstall_geoip_db';
|
||||
confirmAjaxCall(url, msg, 'Uninstalling GeoLite2 database.', getConfigurationTable);
|
||||
});
|
||||
|
||||
$('.guidelines-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#guidelines-link').attr('href', $('#source-link').attr('href'));
|
||||
$('#guidelines-type').text($(this).data('id'))
|
||||
$('#guidelines-modal').modal();
|
||||
$('#guidelines-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#guidelines-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
$('.support-modal-link').on('click', function (e) {
|
||||
e.preventDefault();
|
||||
$('#faq-link').attr('href', $('#faq-source-link').attr('href'));
|
||||
$('#support-modal').modal();
|
||||
$('#support-continue').attr('href', $(this).attr('href')).on('click', function () {
|
||||
$('#support-modal').modal('hide');
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
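The install and uninstall handlers above only trigger the server-side download or removal of MaxMind's GeoLite2 database; the dialog text notes it is used to look up IP address geolocation info from the PlexPy directory. As an illustration only (not code from this changeset, and not necessarily how PlexPy itself performs the lookup), a City query against such a downloaded `.mmdb` file with MaxMind's `geoip2` package looks roughly like this; the database path and IP address are made-up examples:

```python
# Illustrative sketch: querying a GeoLite2 City database with MaxMind's geoip2 package.
# The database path and IP address below are hypothetical examples.
import geoip2.database

reader = geoip2.database.Reader('/opt/plexpy/GeoLite2-City.mmdb')
try:
    # Raises geoip2.errors.AddressNotFoundError if the IP is not in the database.
    response = reader.city('203.0.113.42')
    print('%s, %s (%s, %s)' % (response.country.name, response.city.name,
                               response.location.latitude, response.location.longitude))
finally:
    reader.close()
```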
|
data/interfaces/default/css/opensans.min.css (new vendored file, 1 line) @@ -0,0 +1 @@
|
||||
@font-face{font-family:'Open Sans';font-weight:400;font-style:normal;src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot);src:url(../fonts/Open-Sans-regular/Open-Sans-regular.eot?#iefix) format('embedded-opentype'),local('Open Sans'),local('Open-Sans-regular'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff2) format('woff2'),url(../fonts/Open-Sans-regular/Open-Sans-regular.woff) format('woff'),url(../fonts/Open-Sans-regular/Open-Sans-regular.ttf) format('truetype'),url(../fonts/Open-Sans-regular/Open-Sans-regular.svg#OpenSans) format('svg')}@font-face{font-family:'Open Sans';font-weight:600;font-style:normal;src:url(../fonts/Open-Sans-600/Open-Sans-600.eot);src:url(../fonts/Open-Sans-600/Open-Sans-600.eot?#iefix) format('embedded-opentype'),local('Open Sans Semibold'),local('Open-Sans-600'),url(../fonts/Open-Sans-600/Open-Sans-600.woff2) format('woff2'),url(../fonts/Open-Sans-600/Open-Sans-600.woff) format('woff'),url(../fonts/Open-Sans-600/Open-Sans-600.ttf) format('truetype'),url(../fonts/Open-Sans-600/Open-Sans-600.svg#OpenSans) format('svg')}
|
@@ -598,6 +598,7 @@ a .users-poster-face:hover {
|
||||
}
|
||||
.dashboard-instance.hover .bar {
|
||||
height: 14px;
|
||||
max-width: 100%;
|
||||
transform-origin: top;
|
||||
transition: all .2s ease;
|
||||
border-radius: 0px 0px 3px 3px;
|
||||
@@ -608,6 +609,7 @@ a .users-poster-face:hover {
|
||||
}
|
||||
.dashboard-instance.hover .bufferbar {
|
||||
height: 14px;
|
||||
max-width: 100%;
|
||||
transform-origin: top;
|
||||
transition: all .2s ease;
|
||||
border-radius: 0px 0px 3px 3px;
|
||||
@@ -836,6 +838,7 @@ a .users-poster-face:hover {
|
||||
background-color: #444;
|
||||
position: absolute;
|
||||
height: 6px;
|
||||
max-width: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
.dashboard-activity-progress .bar {
|
||||
@@ -853,6 +856,7 @@ a .users-poster-face:hover {
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450', endColorstr='#fff89406', GradientType=0);
|
||||
position: absolute;
|
||||
height: 6px;
|
||||
max-width: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
.dashboard-activity-metadata-wrapper {
|
||||
@@ -1184,6 +1188,7 @@ a:hover .dashboard-recent-media-cover {
|
||||
margin: 0 40px 0 25px;
|
||||
height: 100px;
|
||||
overflow: visible;
|
||||
position: relative;
|
||||
}
|
||||
.summary-poster-face {
|
||||
background-position: center;
|
||||
@@ -1922,6 +1927,13 @@ a .library-user-instance-box:hover {
|
||||
.home-platforms-instance-poster {
|
||||
margin-left: 0px;
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-cover {
|
||||
margin-left: 0px;
|
||||
position: absolute;
|
||||
top: 20px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-poster .home-platforms-poster-face {
|
||||
background-position: center;
|
||||
@@ -1932,6 +1944,15 @@ a .library-user-instance-box:hover {
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.home-platforms-instance-cover .home-platforms-cover-face {
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
height: 80px;
|
||||
width: 80px;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.home-platforms-instance-poster .home-platforms-library-thumb {
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
@@ -2079,6 +2100,13 @@ a .library-user-instance-box:hover {
|
||||
.home-platforms-instance-list-poster {
|
||||
position: absolute;
|
||||
left: 20px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-list-cover {
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
left: 20px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.home-platforms-instance-list-poster .home-platforms-list-poster-face {
|
||||
background-position: center;
|
||||
@@ -2089,6 +2117,15 @@ a .library-user-instance-box:hover {
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.home-platforms-instance-list-cover .home-platforms-list-cover-face {
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
height: 40px;
|
||||
width: 40px;
|
||||
-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
-moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);
|
||||
}
|
||||
.home-platforms-instance-list-box {
|
||||
background-position: center;
|
||||
background-size: cover;
|
||||
@@ -2147,7 +2184,9 @@ a .home-platforms-instance-oval:hover,
|
||||
a .home-platforms-instance-list-box:hover,
|
||||
a .home-platforms-instance-list-oval:hover,
|
||||
a .home-platforms-poster-face:hover,
|
||||
a .home-platforms-list-poster-face:hover
|
||||
a .home-platforms-cover-face:hover,
|
||||
a .home-platforms-list-poster-face:hover,
|
||||
a .home-platforms-list-cover-face:hover
|
||||
{
|
||||
-webkit-box-shadow: inset 0 0 0 2px #e9a049;
|
||||
-moz-box-shadow: inset 0 0 0 2px #e9a049;
|
||||
@@ -2212,7 +2251,8 @@ a .home-platforms-list-poster-face:hover
|
||||
margin-right: auto;
|
||||
margin-top: 20px;
|
||||
margin-bottom: 20px;
|
||||
width: 90%;
|
||||
width: 100%;
|
||||
max-width: 1750px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.table-card-header {
|
||||
@@ -2224,7 +2264,8 @@ a .home-platforms-list-poster-face:hover
|
||||
margin-right: auto;
|
||||
margin-top: 20px;
|
||||
margin-bottom: -20px;
|
||||
width: 90%;
|
||||
width: 100%;
|
||||
max-width: 1750px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.table-card-back td {
|
||||
@@ -2450,6 +2491,9 @@ a .home-platforms-list-poster-face:hover
|
||||
.dataTables_paginate li {
|
||||
margin: 0;
|
||||
}
|
||||
div.dataTables_info {
|
||||
white-space: normal !important;
|
||||
}
|
||||
.tooltip.top .tooltip-arrow {
|
||||
border-top-color: #fff;
|
||||
}
|
||||
@@ -2623,7 +2667,7 @@ a .home-platforms-list-poster-face:hover
|
||||
|
||||
@media only screen
|
||||
and (min-device-width: 300px)
|
||||
and (max-device-width: 400px) {
|
||||
and (max-device-width: 450px) {
|
||||
.home-platforms-instance {
|
||||
width: calc(100% - 20px);
|
||||
}
|
||||
@@ -2703,6 +2747,13 @@ div[id^='media_info_child'] div[id^='media_info_child'] div.dataTables_scrollHea
|
||||
.dataTables_scrollBody {
|
||||
-webkit-overflow-scrolling: touch;
|
||||
}
|
||||
.current-activity-row {
|
||||
background-color: rgba(255,255,255,.1) !important;
|
||||
}
|
||||
.current-activity-row:hover {
|
||||
background-color: rgba(255,255,255,0.125) !important;
|
||||
}
|
||||
|
||||
#search_form {
|
||||
width: 300px;
|
||||
padding: 8px 15px;
|
||||
@@ -2785,6 +2836,14 @@ div[id^='media_info_child'] div[id^='media_info_child'] div.dataTables_scrollHea
|
||||
width: 75px;
|
||||
height: 34px;
|
||||
}
|
||||
#months-selection label {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
#graph-months {
|
||||
margin: 0;
|
||||
width: 75px;
|
||||
height: 34px;
|
||||
}
|
||||
.card-sortable {
|
||||
height: 36px;
|
||||
padding: 0 20px 0 0;
|
||||
@@ -2954,4 +3013,68 @@ a.no-highlight:hover {
|
||||
.datatable-wrap {
|
||||
min-width: 150px;
|
||||
max-width: 250px;
|
||||
}
|
||||
}
|
||||
.inline-pre {
|
||||
font-family: monospace;
|
||||
margin: 0 2px;
|
||||
padding: 2px 5px;
|
||||
font-size: 13px;
|
||||
color: #fff;
|
||||
background-color: #555;
|
||||
border: 0px solid #444;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.overlay-refresh-image {
|
||||
opacity: 0;
|
||||
color: #000;
|
||||
font-size: 16px;
|
||||
float: left;
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
right: 10px;
|
||||
z-index: 1;
|
||||
transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-webkit-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-moz-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
-o-transition: all .1s cubic-bezier(.4,0,1,1);
|
||||
text-shadow: -1px -1px 0 #fff, 1px -1px 0 #fff, -1px 1px 0 #fff, 1px 1px 0 #fff;
|
||||
}
|
||||
.overlay-refresh-image.left {
|
||||
left: 10px;
|
||||
}
|
||||
.overlay-refresh-image.info-art {
|
||||
color: #999;
|
||||
top: 15px;
|
||||
right: 25px;
|
||||
opacity: 1;
|
||||
text-shadow: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
.overlay-refresh-image.info-art:hover {
|
||||
color: #fff;
|
||||
text-shadow: none;
|
||||
}
|
||||
a:hover .overlay-refresh-image {
|
||||
opacity: .25;
|
||||
top: 8px;
|
||||
}
|
||||
a:hover .overlay-refresh-image:hover {
|
||||
opacity: .9;
|
||||
}
|
||||
#ip_error, #isp_error {
|
||||
color: #aaa;
|
||||
display: none;
|
||||
text-align: center;
|
||||
padding-top: 10px;
|
||||
padding-bottom: 10px;
|
||||
}
|
||||
#plexpy-log-levels label,
|
||||
#plex-log-levels label {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
#api_key.form-control[readonly] {
|
||||
background-color: #555;
|
||||
}
|
||||
#api_key.form-control[readonly]:focus {
|
||||
background-color: #fff;
|
||||
}
|
||||
|
@@ -106,6 +106,9 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${a['art']});"></div>
|
||||
% endif
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="dashboard-activity-button-info">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${a['session_key']}">
|
||||
<i class="fa fa-info-circle"></i>
|
||||
|
@@ -61,6 +61,8 @@ DOCUMENTATION :: END
|
||||
|
||||
% if data is not None:
|
||||
<%
|
||||
from urllib import quote
|
||||
|
||||
from plexpy import helpers
|
||||
data['indexes'] = helpers.cast_to_int(data['indexes'])
|
||||
%>
|
||||
@@ -71,7 +73,7 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<a href="#">
|
||||
% endif
|
||||
<div class="dashboard-activity-poster">
|
||||
<div class="dashboard-activity-poster" id="poster-${data['session_key']}">
|
||||
% if not data['art'].startswith('interfaces') or not data['thumb'].startswith('interfaces'):
|
||||
% if (data['media_type'] == 'movie' and not data['indexes']) or (data['indexes'] and not data['view_offset']):
|
||||
<div id="bif-${data['session_key']}" class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['art']}&width=500&height=280&fallback=art);"></div>
|
||||
@@ -90,9 +92,11 @@ DOCUMENTATION :: END
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${data['thumb']});"></div>
|
||||
% else:
|
||||
% if data['art']:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['art']}&width=500&height=280&fallback=art);"></div>
|
||||
<!--Hacky solution to escape the image url until I come up with something better-->
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${quote(data['art'])}&width=500&height=280&fallback=art);"></div>
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=280&fallback=art);"></div>
|
||||
<!--Hacky solution to escape the image url until I come up with something better-->
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img=${quote(data['thumb'])}&width=500&height=280&fallback=art);"></div>
|
||||
% endif
|
||||
% endif
|
||||
% elif data['media_type'] == 'photo':
|
||||
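The "hacky" escaping noted above routes the raw art/thumb path through `quote` (the `from urllib import quote` added at the top of this template) so that characters which would otherwise break the inline `background-image: url(...)` value are percent-encoded. A small standalone illustration of that escaping; the sample path is hypothetical:

```python
# Python 2 sketch: percent-encode a metadata path before embedding it in the proxy URL.
from urllib import quote

art = '/library/metadata/1234/art/1453768 (1).jpg'  # hypothetical path with a space and parentheses
print 'pms_image_proxy?img=%s&width=500&height=280&fallback=art' % quote(art)
# -> pms_image_proxy?img=/library/metadata/1234/art/1453768%20%281%29.jpg&width=500&height=280&fallback=art
```

`quote` leaves `/` unescaped by default, so the path structure survives while spaces and parentheses become `%20`, `%28`, and `%29`.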
@@ -104,8 +108,11 @@ DOCUMENTATION :: END
|
||||
% else:
|
||||
<div class="dashboard-activity-poster-face" style="background-image: url(${data['art']});"></div>
|
||||
% endif
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image left" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="dashboard-activity-button-info">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}">
|
||||
<button type="button" class="btn btn-activity-info btn-lg" data-target="#stream-${data['session_key']}" data-id="${data['session_key']}">
|
||||
<i class="fa fa-info-circle"></i>
|
||||
</button>
|
||||
</div>
|
||||
@@ -128,79 +135,39 @@ DOCUMENTATION :: END
|
||||
% endif
|
||||
</span>
|
||||
</div>
|
||||
% if data['media_type'] == 'track':
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
Transcoding
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
% if data['video_decision'] == 'transcode' or data['audio_decision'] == 'transcode':
|
||||
Stream <strong>Transcode (Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% endif
|
||||
% elif data['media_type'] == 'episode' or data['media_type'] == 'movie' or data['media_type'] == 'clip':
|
||||
% if data['video_decision'] == 'direct play' and data['audio_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['video_decision'] == 'copy' and data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
Transcoding
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
</strong>
|
||||
% elif data['video_decision'] == 'copy' or data['audio_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>Direct Play</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['video_decision'] and data['media_type'] != 'photo':
|
||||
% if data['video_decision'] == 'transcode':
|
||||
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% else:
|
||||
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['video_decision'] == 'direct play':
|
||||
Video <strong>Direct Play (${data['video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Video <strong>Direct Stream (${data['transcode_video_codec']}) (${data['width']}x${data['height']})</strong>
|
||||
% elif data['video_decision'] == 'transcode':
|
||||
Video <strong>Transcode (${data['transcode_video_codec']}) (${data['transcode_width']}x${data['transcode_height']})</strong>
|
||||
% endif
|
||||
<br />
|
||||
% if data['audio_decision'] == 'direct play':
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% endif
|
||||
% elif data['media_type'] == 'photo':
|
||||
% if data['video_decision'] == 'direct play':
|
||||
Stream <strong>Direct Play</strong>
|
||||
% elif data['video_decision'] == 'copy':
|
||||
Stream <strong>Direct Stream</strong>
|
||||
% else:
|
||||
Stream <strong>
|
||||
<span id="transcode-state-${data['session_key']}">
|
||||
(Speed: ${data['transcode_speed']})
|
||||
% if data['throttled'] == '1':
|
||||
(Throttled)
|
||||
<br />
|
||||
% endif
|
||||
% if data['audio_decision']:
|
||||
% if data['audio_decision'] == 'transcode':
|
||||
Audio <strong>Transcode (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% elif data['audio_decision'] == 'copy':
|
||||
Audio <strong>Direct Stream (${data['transcode_audio_codec']}) (${data['transcode_audio_channels']}ch)</strong>
|
||||
% else:
|
||||
Audio <strong>Direct Play (${data['audio_codec']}) (${data['audio_channels']}ch)</strong>
|
||||
% endif
|
||||
</span>
|
||||
</strong>
|
||||
% endif
|
||||
% endif
|
||||
<br>
|
||||
% endif
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
% if data['media_type'] != 'photo':
|
||||
@@ -240,8 +207,8 @@ DOCUMENTATION :: END
|
||||
% endif
|
||||
<div class="dashboard-activity-progress">
|
||||
<div class="dashboard-activity-progress-bar">
|
||||
<div id="bufferbar-${data['session_key']}" class="bufferbar" style="width: ${data['transcode_progress']}%" data-toggle="tooltip" title="Transcoder Progress">${data['transcode_progress']}%</div>
|
||||
<div id="bar-${data['session_key']}" class="bar" style="width: ${data['progress_percent']}%" data-toggle="tooltip" title="Stream Progress">${data['progress_percent']}%</div>
|
||||
<div id="bufferbar-${data['session_key']}" class="bufferbar" style="width: ${data['transcode_progress']}%" data-toggle="tooltip" title="Transcoder Progress ${data['transcode_progress']}%">${data['transcode_progress']}%</div>
|
||||
<div id="bar-${data['session_key']}" class="bar" style="width: ${data['progress_percent']}%" data-toggle="tooltip" title="Stream Progress ${data['progress_percent']}%">${data['progress_percent']}%</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -45,7 +45,7 @@ DOCUMENTATION :: END
|
||||
<input type="text" class="form-control" id="friendly_name" name="friendly_name" value="${data['friendly_name']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Replace all occurances of the username with this name.</p>
|
||||
<p class="help-block">Replace all occurrences of the username with this name.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="profile_url">Profile Picture URL</label>
|
||||
|
data/interfaces/default/fonts/Open-Sans-600/LICENSE.txt (new file, 202 lines) @@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.eot (new binary file)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.svg (new file, 1637 lines; image, 104 KiB)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.ttf (new binary file)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff (new binary file)
data/interfaces/default/fonts/Open-Sans-600/Open-Sans-600.woff2 (new binary file)
data/interfaces/default/fonts/Open-Sans-regular/LICENSE.txt (new file, 202 lines) @@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
(new image file, 105 KiB)
@@ -42,6 +42,11 @@
|
||||
<input type="number" name="graph-days" id="graph-days" value="${config['graph_days']}" min="1" /> days
|
||||
</label>
|
||||
</div>
|
||||
<div class="btn-group" id="months-selection">
|
||||
<label>
|
||||
<input type="number" name="graph-months" id="graph-months" value="${config['graph_months']}" min="1" /> months
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class='table-card-back'>
|
||||
@@ -226,7 +231,7 @@
|
||||
% endif
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<h4><i class="fa fa-calendar"></i> Plays by Month <small>Last 12 months</small></h4>
|
||||
<h4><i class="fa fa-calendar"></i> Plays by month <small>Last <span class="months">12</span> months</small></h4>
|
||||
<p class="help-block">
|
||||
The combined total of tv, movies, and music by month.
|
||||
</p>
|
||||
@@ -317,10 +322,12 @@
|
||||
|
||||
// Initial values for graph from config
|
||||
var yaxis = "${config['graph_type']}";
|
||||
var current_range = ${config['graph_days']};
|
||||
var current_day_range = ${config['graph_days']};
|
||||
var current_month_range = ${config['graph_months']};
|
||||
var current_tab = "${'#' + config['graph_tab']}";
|
||||
|
||||
$('.days').html(current_range);
|
||||
$('.days').html(current_day_range);
|
||||
$('.months').html(current_month_range);
|
||||
|
||||
// Load user ids and names (for the selector)
|
||||
$.ajax({
|
||||
@@ -341,9 +348,18 @@
|
||||
|
||||
|
||||
var music_visible = (${config['music_logging_enable']} == 1 ? true : false);
|
||||
|
||||
function dataSecondsToHours(data) {
|
||||
$.each(data.series, function (i, series) {
|
||||
series.data = $.map(series.data, function (value) {
|
||||
return value / 60 / 60;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function loadGraphsTab1(time_range, yaxis) {
|
||||
$('#days-selection').show();
|
||||
$('#months-selection').hide();
|
||||
|
||||
setGraphFormat(yaxis);
|
||||
|
||||
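When the y-axis is set to `duration`, the new `dataSecondsToHours` helper from the hunk above rescales every series point from seconds to hours (`value / 60 / 60`) before the chart is built, which is also why the y-axis formatter later in this diff switches from `moment.duration(this.value, 'seconds')` to `moment.duration(this.value, 'hours')`. The same transformation sketched in Python with made-up data:

```python
# Sketch of the seconds-to-hours rescaling applied to each Highcharts series.
series_data = [5400, 7200, 86400]                # hypothetical play durations in seconds
hours = [value / 60.0 / 60.0 for value in series_data]
print(hours)                                     # [1.5, 2.0, 24.0]
```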
@@ -354,18 +370,19 @@
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
var dateArray = [];
|
||||
for (var i = 0; i < data.categories.length; i++) {
|
||||
dateArray.push(moment(data.categories[i], 'YYYY-MM-DD').valueOf());
|
||||
$.each(data.categories, function (i, day) {
|
||||
dateArray.push(moment(day, 'YYYY-MM-DD').valueOf());
|
||||
// Highlight the weekend
|
||||
if ((moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sat') ||
|
||||
(moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sun')) {
|
||||
if ((moment(day, 'YYYY-MM-DD').format('ddd') == 'Sat') ||
|
||||
(moment(day, 'YYYY-MM-DD').format('ddd') == 'Sun')) {
|
||||
hc_plays_by_day_options.xAxis.plotBands.push({
|
||||
from: i-0.5,
|
||||
to: i+0.5,
|
||||
color: 'rgba(80,80,80,0.3)'
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_day_options.yAxis.min = 0;
|
||||
hc_plays_by_day_options.xAxis.categories = dateArray;
|
||||
hc_plays_by_day_options.series = data.series;
|
||||
@@ -380,6 +397,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_dayofweek_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_dayofweek_options.series = data.series;
|
||||
hc_plays_by_dayofweek_options.series[2].visible = music_visible;
|
||||
@@ -393,6 +411,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_hourofday_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_hourofday_options.series = data.series;
|
||||
hc_plays_by_hourofday_options.series[2].visible = music_visible;
|
||||
@@ -406,6 +425,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_platform_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_platform_options.series = data.series;
|
||||
hc_plays_by_platform_options.series[2].visible = music_visible;
|
||||
@@ -419,6 +439,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_user_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_user_options.series = data.series;
|
||||
hc_plays_by_user_options.series[2].visible = music_visible;
|
||||
@@ -429,6 +450,7 @@
|
||||
|
||||
function loadGraphsTab2(time_range, yaxis) {
|
||||
$('#days-selection').show();
|
||||
$('#months-selection').hide();
|
||||
|
||||
setGraphFormat(yaxis);
|
||||
|
||||
@@ -439,18 +461,19 @@
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
var dateArray = [];
|
||||
for (var i = 0; i < data.categories.length; i++) {
|
||||
dateArray.push(moment(data.categories[i], 'YYYY-MM-DD').valueOf());
|
||||
$.each(data.categories, function (i, day) {
|
||||
dateArray.push(moment(day, 'YYYY-MM-DD').valueOf());
|
||||
// Highlight the weekend
|
||||
if ((moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sat') ||
|
||||
(moment(data.categories[i], 'YYYY-MM-DD').format('ddd') == 'Sun')) {
|
||||
hc_plays_by_stream_type_options.xAxis.plotBands.push({
|
||||
if ((moment(day, 'YYYY-MM-DD').format('ddd') == 'Sat') ||
|
||||
(moment(day, 'YYYY-MM-DD').format('ddd') == 'Sun')) {
|
||||
hc_plays_by_day_options.xAxis.plotBands.push({
|
||||
from: i-0.5,
|
||||
to: i+0.5,
|
||||
color: 'rgba(80,80,80,0.3)'
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_stream_type_options.yAxis.min = 0;
|
||||
hc_plays_by_stream_type_options.xAxis.categories = dateArray;
|
||||
hc_plays_by_stream_type_options.series = data.series;
|
||||
@@ -464,6 +487,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_source_resolution_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_source_resolution_options.series = data.series;
|
||||
var hc_plays_by_source_resolution = new Highcharts.Chart(hc_plays_by_source_resolution_options);
|
||||
@@ -476,6 +500,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_stream_resolution_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_stream_resolution_options.series = data.series;
|
||||
var hc_plays_by_stream_resolution = new Highcharts.Chart(hc_plays_by_stream_resolution_options);
|
||||
@@ -488,6 +513,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_platform_by_stream_type_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_platform_by_stream_type_options.series = data.series;
|
||||
var hc_plays_by_platform_by_stream_type = new Highcharts.Chart(hc_plays_by_platform_by_stream_type_options);
|
||||
@@ -500,6 +526,7 @@
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_user_by_stream_type_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_user_by_stream_type_options.series = data.series;
|
||||
var hc_plays_by_user_by_stream_type = new Highcharts.Chart(hc_plays_by_user_by_stream_type_options);
|
||||
@@ -507,17 +534,19 @@
|
||||
});
|
||||
}
|
||||
|
||||
function loadGraphsTab3(yaxis) {
|
||||
function loadGraphsTab3(time_range, yaxis) {
|
||||
$('#days-selection').hide();
|
||||
$('#months-selection').show();
|
||||
|
||||
setGraphFormat(yaxis);
|
||||
|
||||
$.ajax({
|
||||
url: "get_plays_per_month",
|
||||
type: 'get',
|
||||
data: { y_axis: yaxis, user_id: selected_user_id },
|
||||
data: { time_range: time_range, y_axis: yaxis, user_id: selected_user_id },
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
if (yaxis === 'duration') { dataSecondsToHours(data); }
|
||||
hc_plays_by_month_options.yAxis.min = 0;
|
||||
hc_plays_by_month_options.xAxis.categories = data.categories;
|
||||
hc_plays_by_month_options.series = data.series;
|
||||
@@ -528,15 +557,15 @@
|
||||
}
|
||||
|
||||
// Set initial state
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(yaxis); }
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(current_month_range, yaxis); }
|
||||
|
||||
// Tab1 opened
|
||||
$('#graph-tabs a[href="#tabs-1"]').on('shown.bs.tab', function (e) {
|
||||
e.preventDefault();
|
||||
current_tab = $(this).attr('href');
|
||||
loadGraphsTab1(current_range, yaxis);
|
||||
loadGraphsTab1(current_day_range, yaxis);
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_tab: current_tab.replace('#','') },
|
||||
@@ -548,7 +577,7 @@
|
||||
$('#graph-tabs a[href="#tabs-2"]').on('shown.bs.tab', function (e) {
|
||||
e.preventDefault();
|
||||
current_tab = $(this).attr('href');
|
||||
loadGraphsTab2(current_range, yaxis);
|
||||
loadGraphsTab2(current_day_range, yaxis);
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_tab: current_tab.replace('#','') },
|
||||
@@ -560,7 +589,7 @@
|
||||
$('#graph-tabs a[href="#tabs-3"]').on('shown.bs.tab', function (e) {
|
||||
e.preventDefault();
|
||||
current_tab = $(this).attr('href');
|
||||
loadGraphsTab3(yaxis);
|
||||
loadGraphsTab3(current_month_range, yaxis);
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_tab: current_tab.replace('#','') },
|
||||
@@ -570,17 +599,35 @@
|
||||
|
||||
// Date range changed
|
||||
$('#graph-days').on('change', function() {
|
||||
current_range = $(this).val();
|
||||
if (current_range < 1) {
|
||||
current_day_range = Math.round($(this).val());
|
||||
$(this).val(current_day_range);
|
||||
if (current_day_range < 1) {
|
||||
$(this).val(7);
|
||||
current_range = 7;
|
||||
current_day_range = 7;
|
||||
}
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_range, yaxis); }
|
||||
$('.days').html(current_range);
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_day_range, yaxis); }
|
||||
$('.days').html(current_day_range);
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_days: current_range},
|
||||
data: { graph_days: current_day_range},
|
||||
async: true
|
||||
});
|
||||
});
|
||||
|
||||
// Month range changed
|
||||
$('#graph-months').on('change', function() {
|
||||
current_month_range = Math.round($(this).val());
|
||||
$(this).val(current_month_range);
|
||||
if (current_month_range < 1) {
|
||||
$(this).val(12);
|
||||
current_month_range = 12;
|
||||
}
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(current_month_range, yaxis); }
|
||||
$('.months').html(current_month_range);
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_months: current_month_range},
|
||||
async: true
|
||||
});
|
||||
});
|
||||
@@ -588,17 +635,17 @@
|
||||
// User changed
|
||||
$('#graph-user').on('change', function() {
|
||||
selected_user_id = $(this).val() || null;
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(yaxis); }
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(current_month_range, yaxis); }
|
||||
});
|
||||
|
||||
// Y-axis changed
|
||||
$('#yaxis-selection').on('change', function() {
|
||||
yaxis = $('input[name=yaxis-options]:checked', '#yaxis-selection').val();
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(yaxis); }
|
||||
if (current_tab == '#tabs-1') { loadGraphsTab1(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-2') { loadGraphsTab2(current_day_range, yaxis); }
|
||||
if (current_tab == '#tabs-3') { loadGraphsTab3(current_month_range, yaxis); }
|
||||
$.ajax({
|
||||
url: 'set_graph_config',
|
||||
data: { graph_type: yaxis},
|
||||
@@ -610,56 +657,55 @@
|
||||
if (type === 'plays') {
|
||||
yaxis_format = function() { return this.value; };
|
||||
tooltip_format = function() {
|
||||
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
|
||||
var s = '<b>'+ moment(this.x).format("ddd MMM D") +'</b>';
|
||||
} else {
|
||||
var s = '<b>'+ this.x +'</b>';
|
||||
}
|
||||
if (this.points.length > 1) {
|
||||
var total = 0;
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+point.y;
|
||||
total += point.y;
|
||||
});
|
||||
s += '<br><b>Total: '+total+'</b>';
|
||||
} else {
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+point.y;
|
||||
});
|
||||
}
|
||||
return s;
|
||||
}
|
||||
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
|
||||
var s = '<b>'+ moment(this.x).format('ddd MMM D') +'</b>';
|
||||
} else {
|
||||
var s = '<b>'+ this.x +'</b>';
|
||||
}
|
||||
if (this.points.length > 1) {
|
||||
var total = 0;
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+point.y;
|
||||
total += point.y;
|
||||
});
|
||||
s += '<br><b>Total: '+total+'</b>';
|
||||
} else {
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+point.y;
|
||||
});
|
||||
}
|
||||
return s;
|
||||
}
|
||||
stack_labels_format = function() {
|
||||
return this.total;
|
||||
}
|
||||
|
||||
return this.total;
|
||||
}
|
||||
$('.yaxis-text').html('Play count');
|
||||
} else {
|
||||
yaxis_format = function() { return moment.duration(this.value, 'seconds').format("H [h] m [m]"); };
|
||||
yaxis_format = function() { return moment.duration(this.value, 'hours').format('H [h] m [m]'); };
|
||||
tooltip_format = function() {
|
||||
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
|
||||
var s = '<b>'+ moment(this.x).format("ddd MMM D") +'</b>';
|
||||
} else {
|
||||
var s = '<b>'+ this.x +'</b>';
|
||||
}
|
||||
if (this.points.length > 1) {
|
||||
var total = 0;
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'seconds').format('D [days] H [hrs] m [mins]');
|
||||
total += point.y;
|
||||
});
|
||||
s += '<br/><b>Total: '+moment.duration(total, 'seconds').format('D [days] H [hrs] m [mins]')+'</b>';
|
||||
} else {
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'seconds').format('D [days] H [hrs] m [mins]');
|
||||
});
|
||||
}
|
||||
return s;
|
||||
}
|
||||
if (moment(this.x, 'X').isValid() && (this.x > 946684800)) {
|
||||
var s = '<b>'+ moment(this.x).format('ddd MMM D') +'</b>';
|
||||
} else {
|
||||
var s = '<b>'+ this.x +'</b>';
|
||||
}
|
||||
if (this.points.length > 1) {
|
||||
var total = 0;
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'hours').format('D [days] H [hrs] m [mins]');
|
||||
total += point.y;
|
||||
});
|
||||
s += '<br/><b>Total: '+moment.duration(total, 'hours').format('D [days] H [hrs] m [mins]')+'</b>';
|
||||
} else {
|
||||
$.each(this.points, function(i, point) {
|
||||
s += '<br/>'+point.series.name+': '+moment.duration(point.y, 'hours').format('D [days] H [hrs] m [mins]');
|
||||
});
|
||||
}
|
||||
return s;
|
||||
}
|
||||
stack_labels_format = function() {
|
||||
var s = moment.duration(this.total, 'seconds').format("H [hrs] m [mins]");
|
||||
return s;
|
||||
}
|
||||
var s = moment.duration(this.total, 'hours').format('H [h] m [m]');
|
||||
return s;
|
||||
}
|
||||
$('.yaxis-text').html('Play duration');
|
||||
}
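The switch from 'seconds' to 'hours' in the duration formatters above changes how the raw series values are interpreted. A small sketch of the difference, assuming moment.js with the moment-duration-format plugin that this file already uses:

var total = 5400; // example value from a stacked series

// Interpreted as seconds: roughly an hour and a half.
moment.duration(total, 'seconds').format('H [h] m [m]');

// Interpreted as hours: hundreds of days.
moment.duration(total, 'hours').format('D [days] H [hrs] m [mins]');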
|
||||
|
||||
|
@@ -103,6 +103,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -149,6 +152,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -199,6 +205,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['grandparent_thumb'] != '':
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -241,6 +250,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -295,6 +307,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -341,6 +356,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -391,6 +409,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -433,6 +454,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
@@ -485,18 +509,21 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['rating_key']:
|
||||
<a href="info?rating_key=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
|
||||
% if top_stat['rows'][0]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
</a>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
% if len(top_stat['rows']) > 1:
|
||||
@@ -531,18 +558,21 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['rating_key']:
|
||||
<a href="info?rating_key=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
</a>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
<div class="home-platforms-instance-list-number">
|
||||
@@ -581,18 +611,21 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['rating_key']:
|
||||
<a href="info?rating_key=${top_stat['rows'][0]['rating_key']}" title="${top_stat['rows'][0]['title']}">
|
||||
% if top_stat['rows'][0]['grandparent_thumb'] != '':
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
</a>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-cover">
|
||||
<div class="home-platforms-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
% if len(top_stat['rows']) > 1:
|
||||
@@ -623,18 +656,21 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['rating_key']:
|
||||
<a href="info?rating_key=${top_stat['rows'][loop.index]['rating_key']}" title="${top_stat['rows'][loop.index]['title']}">
|
||||
% if top_stat['rows'][loop.index]['grandparent_thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=poster);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['grandparent_thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
</a>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(${http_root}images/poster.png);"></div>
|
||||
<div class="home-platforms-instance-list-cover">
|
||||
<div class="home-platforms-list-cover-face" style="background-image: url(${http_root}images/cover.png);"></div>
|
||||
</div>
|
||||
% endif
|
||||
<div class="home-platforms-instance-list-number">
|
||||
@@ -847,6 +883,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][0]['thumb']:
|
||||
<div class="home-platforms-instance-poster">
|
||||
<div class="home-platforms-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][0]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-poster">
|
||||
@@ -903,6 +942,9 @@ DOCUMENTATION :: END
|
||||
% if top_stat['rows'][loop.index]['thumb']:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
<div class="home-platforms-list-poster-face" style="background-image: url(pms_image_proxy?img=${top_stat['rows'][loop.index]['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
% else:
|
||||
<div class="home-platforms-instance-list-poster">
|
||||
|
BIN  data/interfaces/default/images/flattr-badge-large.png (new file, 2.2 KiB)
BIN  data/interfaces/default/images/gold-rect-paypal-34px.png (new file, 2.7 KiB)
BIN  data/interfaces/default/images/platforms/plextogether.png (new file, 4.2 KiB)
@@ -103,10 +103,25 @@
|
||||
type: 'GET',
|
||||
cache: false,
|
||||
async: true,
|
||||
error: function (xhr, status, error) {
|
||||
console.log(status + ': ' + error);
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
$('#dashboard-checking-activity').remove();
|
||||
|
||||
var current_activity = $.parseJSON(xhr.responseText);
|
||||
var current_activity;
|
||||
try {
|
||||
current_activity = $.parseJSON(xhr.responseText);
|
||||
} catch (e) {
|
||||
console.log(status + ': ' + e);
|
||||
current_activity = null;
|
||||
}
|
||||
|
||||
if (!(current_activity)) {
|
||||
$('#currentActivity').html('<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.</div>');
|
||||
return
|
||||
}
|
||||
|
||||
var stream_count = parseInt(current_activity.stream_count);
|
||||
var sessions = current_activity.sessions;
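The try/catch added above protects against an empty or malformed activity response. The same guard could be factored out; a sketch with a hypothetical helper name:

function safeParseJSON(text) {
    // Parse JSON, returning null instead of throwing on bad input.
    try {
        return $.parseJSON(text);
    } catch (e) {
        console.log('Failed to parse response: ' + e);
        return null;
    }
}

// var current_activity = safeParseJSON(xhr.responseText);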
|
||||
|
||||
@@ -150,13 +165,38 @@
|
||||
bif_poster.animate({ opacity: 0 }, { duration: 1000, queue: false });
|
||||
bif_poster.after($('<div id="bif-' + key + '" class="dashboard-activity-poster-face" style="background-image: url(pms_image_proxy?img='
|
||||
+ s.bif_thumb + '&width=500&height=280&fallback=art);"></div>').fadeIn(1000, function () { bif_poster.remove() }));
|
||||
blurArtwork(key);
|
||||
}
|
||||
|
||||
// if transcoding, update the transcode state
|
||||
var ts = '';
|
||||
if (s.video_decision == 'transcode' || s.audio_decision == 'transcode') {
|
||||
var throttled = (s.throttled == '1') ? ' (Throttled)' : '';
|
||||
$('#transcode-state-' + key).html('(Speed: ' + s.transcode_speed + ')' + throttled);
|
||||
ts += 'Stream <strong>Transcode (Speed: ' + s.transcode_speed + ')' + throttled + '</strong><br>';
|
||||
} else if (s.video_decision == 'copy' || s.audio_decision == 'copy') {
|
||||
ts += 'Stream <strong>Direct Stream</strong><br>';
|
||||
} else {
|
||||
ts += 'Stream <strong>Direct Play</strong><br>';
|
||||
}
|
||||
if (s.video_decision != '' && s.media_type != 'photo') {
|
||||
if (s.video_decision == 'transcode') {
|
||||
ts += 'Video <strong>Transcode (' + s.transcode_video_codec + ') (' + s.transcode_width + 'x' + s.transcode_height + ')</strong><br>';
|
||||
} else if (s.video_decision == 'copy') {
|
||||
ts += 'Video <strong>Direct Stream (' + s.transcode_video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
|
||||
} else {
|
||||
ts += 'Video <strong>Direct Play (' + s.video_codec + ') (' + s.width + 'x' + s.height + ')</strong><br>';
|
||||
}
|
||||
}
|
||||
if (s.audio_decision != '') {
|
||||
if (s.audio_decision == 'transcode') {
|
||||
ts += 'Audio <strong>Transcode (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
|
||||
} else if (s.audio_decision == 'copy') {
|
||||
ts += 'Audio <strong>Direct Stream (' + s.transcode_audio_codec + ') (' + s.transcode_audio_channels + 'ch)</strong>';
|
||||
} else {
|
||||
ts += 'Audio <strong>Direct Play (' + s.audio_codec + ') (' + s.audio_channels + 'ch)</strong>';
|
||||
}
|
||||
}
|
||||
$('#transcode-state-' + key).html(ts);
|
||||
|
||||
// update the stream progress times
|
||||
$('#stream-eta-' + key).html(moment().add(parseInt(s.duration) - parseInt(s.view_offset), 'milliseconds').format(time_format));
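The transcode-state text above is assembled from the video and audio decisions. A compact illustration of the same mapping as a standalone function (the name decisionLabel is hypothetical):

function decisionLabel(decision) {
    // Mirror the decision-to-label mapping used when building the ts string above.
    if (decision === 'transcode') {
        return 'Transcode';
    } else if (decision === 'copy') {
        return 'Direct Stream';
    }
    return 'Direct Play';
}

// decisionLabel(s.video_decision) would be 'Direct Stream' when the video track is copied.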
|
||||
@@ -164,8 +204,11 @@
|
||||
|
||||
// update the progress bars
|
||||
// percent - 3 because of 3px padding-right
|
||||
$('#bufferbar-' + key).width(parseInt(s.transcode_progress) - 3 + '%').html(s.transcode_progress + '%');
|
||||
$('#bar-' + key).width(parseInt(s.progress_percent) - 3 + '%').html(s.progress_percent + '%');
|
||||
$('#bufferbar-' + key).width(parseInt(s.transcode_progress) - 3 + '%').html(s.transcode_progress + '%')
|
||||
.attr('data-original-title', 'Transcoder Progress ' + s.transcode_progress + '%');
|
||||
$('#bar-' + key).width(parseInt(s.progress_percent) - 3 + '%').html(s.progress_percent + '%')
|
||||
.attr('data-original-title', 'Stream Progress ' + s.progress_percent + '%');
|
||||
|
||||
|
||||
// add temporary class so we know which instances are still active
|
||||
instance.addClass('updated-temp');
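With this change the progress bars also refresh their Bootstrap tooltip text through the data-original-title attribute. A hedged sketch of the pattern (the element id and percentage are placeholders):

var percent = 42; // placeholder value
$('#bar-1')
    .width((percent - 3) + '%') // -3% compensates for the 3px padding-right noted above
    .html(percent + '%')
    .attr('data-original-title', 'Stream Progress ' + percent + '%');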
|
||||
@@ -210,14 +253,18 @@
|
||||
getCurrentActivity();
|
||||
}, 15000);
|
||||
|
||||
function blurArtwork(session_key) {
|
||||
var filterVal = $('#stream-' + session_key).is(':visible') ? 'blur(5px)' : '';
|
||||
$($('#poster-' + session_key).find('.dashboard-activity-poster-face, .dashboard-activity-cover-face'))
|
||||
.css('filter', filterVal).css('webkitFilter', filterVal).css('mozFilter', filterVal).css('oFilter', filterVal).css('msFilter', filterVal);
|
||||
}
|
||||
|
||||
// Show/Hide activity info
|
||||
$('#currentActivity').on('click', '.btn-activity-info', function (e) {
|
||||
e.preventDefault();
|
||||
$($(this).attr('data-target')).toggle();
|
||||
var id = $(this).closest('.dashboard-instance').data('id');
|
||||
var filterVal = $('#stream-' + id).is(':visible') ? 'blur(5px)' : '';
|
||||
$($(this).closest('.dashboard-activity-poster').find('.dashboard-activity-poster-face, .dashboard-activity-cover-face'))
|
||||
.css('filter',filterVal).css('webkitFilter',filterVal).css('mozFilter',filterVal).css('oFilter',filterVal).css('msFilter',filterVal);
|
||||
var key = $(this).data('id');
|
||||
blurArtwork(key);
|
||||
});
|
||||
|
||||
// Add hover class to dashboard-instance
|
||||
|
@@ -53,6 +53,9 @@ DOCUMENTATION :: END
|
||||
if re.match(pattern, codec):
|
||||
return file
|
||||
return codec
|
||||
|
||||
def br(text):
|
||||
return text.replace('\n', '<br /><br />')
|
||||
%>
|
||||
|
||||
<%inherit file="base.html"/>
|
||||
@@ -68,6 +71,9 @@ DOCUMENTATION :: END
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
<div class="col-md-12">
|
||||
@@ -109,9 +115,9 @@ DOCUMENTATION :: END
|
||||
<div class="col-md-9">
|
||||
<div class="summary-content-poster hidden-xs hidden-sm">
|
||||
% if data['media_type'] == 'track':
|
||||
<a href="http://app.plex.tv/web/app#!/server/${config['pms_identifier']}/details/%2Flibrary%2Fmetadata%2F${data['parent_rating_key']}" target="_blank" title="View in Plex Web">
|
||||
<a href="https://app.plex.tv/web/app#!/server/${config['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${data['parent_rating_key']}" target="_blank" title="View in Plex Web">
|
||||
% else:
|
||||
<a href="http://app.plex.tv/web/app#!/server/${config['pms_identifier']}/details/%2Flibrary%2Fmetadata%2F${data['rating_key']}" target="_blank" title="View in Plex Web">
|
||||
<a href="https://app.plex.tv/web/app#!/server/${config['pms_identifier']}/details?key=%2Flibrary%2Fmetadata%2F${data['rating_key']}" target="_blank" title="View in Plex Web">
|
||||
% endif
|
||||
% if data['media_type'] == 'episode':
|
||||
<div class="summary-poster-face-episode" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=280&fallback=art);">
|
||||
@@ -119,18 +125,27 @@ DOCUMENTATION :: END
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% elif data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
|
||||
<div class="summary-poster-face-track" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=500&height=500&fallback=cover);">
|
||||
<div class="summary-poster-face-overlay">
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% else:
|
||||
<div class="summary-poster-face" style="background-image: url(pms_image_proxy?img=${data['thumb']}&width=300&height=450&fallback=poster);">
|
||||
<div class="summary-poster-face-overlay">
|
||||
<span></span>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
</a>
|
||||
</div>
|
||||
@@ -238,7 +253,7 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
% endif
|
||||
<div class="summary-content-summary">
|
||||
<p> ${data['summary']} </p>
|
||||
<p> ${data['summary'] | br, n} </p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -51,6 +51,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
% elif data['children_type'] == 'episode':
|
||||
@@ -63,6 +66,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
@@ -74,6 +80,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" title="${child['title']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
|
@@ -65,6 +65,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -87,6 +90,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -109,6 +115,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face season-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=450&fallback=poster);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper season-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
@@ -131,6 +140,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face episode-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=500&height=250&fallback=art);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper episode-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
@@ -154,6 +166,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
@@ -175,6 +190,9 @@ DOCUMENTATION :: END
|
||||
<a href="info?rating_key=${child['rating_key']}" id="${child['rating_key']}">
|
||||
<div class="item-children-poster">
|
||||
<div class="item-children-poster-face album-poster" style="background-image: url(pms_image_proxy?img=${child['thumb']}&width=300&height=300&fallback=cover);"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['parent_title']}">${child['parent_title']}</h3>
|
||||
@@ -204,6 +222,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
<div class="item-children-instance-text-wrapper album-item">
|
||||
<h3 title="${child['grandparent_title']}">${child['grandparent_title']}</h3>
|
||||
<h3 title="${child['title']}">${child['title']}</h3>
|
||||
|
@@ -3,37 +3,56 @@
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"><i class="fa fa-remove"></i></button>
|
||||
<h4 class="modal-title" id="myModalLabel">
|
||||
% if data:
|
||||
<strong><span id="modal_header_ip_address">
|
||||
<i class="fa fa-spin fa-refresh"></i> Loading Details...
|
||||
% if data:
|
||||
<i class="fa fa-map-marker"></i> IP Address: ${data}
|
||||
% else:
|
||||
<i class="fa fa-exclamation-circle"></i> Invalid IP Address
|
||||
% endif
|
||||
</span></strong>
|
||||
% else:
|
||||
<i class="fa fa-exclamation-circle"></i> Invalid IP Address</span></strong>
|
||||
% endif
|
||||
</h4>
|
||||
</div>
|
||||
<div class="modal-body" id="modal-text">
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Location Details</strong><span id="ip_loading" style="padding-left: 5px;"><i class="fa fa-refresh fa-spin"></i></span></h4>
|
||||
</div>
|
||||
<div id="ip_error" class="col-sm-12 text-muted"></div>
|
||||
<div class="col-sm-6">
|
||||
<h4><strong>Location Details</strong></h4>
|
||||
<ul class="list-unstyled">
|
||||
<li>Continent: <strong><span id="continent"></span></strong></li>
|
||||
<li>Country: <strong><span id="country"></span></strong></li>
|
||||
<li>Region: <strong><span id="region"></span></strong></li>
|
||||
<li>City: <strong><span id="city"></span></strong></li>
|
||||
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
||||
<li>Latitude: <strong><span id="lat"></span></strong></li>
|
||||
<li>Longitude: <strong><span id="lon"></span></strong></li>
|
||||
<li>Postal Code: <strong><span id="postal_code"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-sm-6">
|
||||
<h4><strong>Connection Details</strong></h4>
|
||||
<ul class="list-unstyled">
|
||||
<li>Organization: <strong><span id="organization"></span></strong></li>
|
||||
<li>Timezone: <strong><span id="timezone"></span></strong></li>
|
||||
<li>Latitude: <strong><span id="latitude"></span></strong></li>
|
||||
<li>Longitude: <strong><span id="longitude"></span></strong></li>
|
||||
<li>Accuracy Radius: <strong><span id="accuracy"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Connection Details</strong><span id="isp_loading" style="padding-left: 5px;"><i class="fa fa-refresh fa-spin"></i></span></h4>
|
||||
</div>
|
||||
<div id="isp_error" class="col-sm-12 text-muted"></div>
|
||||
<div class="col-sm-12">
|
||||
<ul class="list-unstyled">
|
||||
<li>Host: <strong><span id="isp_host"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-sm-6" id="isp_instance">
|
||||
<ul class="list-unstyled">
|
||||
<li>ISP: <strong><span id="isp_name"></span></strong></li>
|
||||
<li>Address: <strong><span id="isp_address"></span></strong></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<% from plexpy.helpers import anon_url %>
|
||||
<span class="text-muted">Telize service written by <a href="${anon_url('https://github.com/fcambus/telize')}" target="_blank">Frederic Cambus</a>.</span>
|
||||
<span class="text-muted">GeoLite2 data created by <a href="${anon_url('http://www.maxmind.com')}" target="_blank">MaxMind</a>.</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -42,27 +61,79 @@
|
||||
<script>
|
||||
function getUserLocation(ip_address) {
|
||||
$.ajax({
|
||||
url: 'https://telize.myhtpc.co.za/geoip/' + ip_address,
|
||||
url: 'get_geoip_lookup',
|
||||
type: 'GET',
|
||||
data: { ip_address: ip_address },
|
||||
cache: true,
|
||||
async: true,
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
error: function(){
|
||||
$('#modal_header_ip_address').html("Request failed. Server may be too busy.");
|
||||
complete: function () {
|
||||
$('#ip_loading').remove();
|
||||
},
|
||||
success: function(data) {
|
||||
$('#modal_header_ip_address').html('<i class="fa fa-map-marker"></i> IP Address: ' + ip_address);
|
||||
$('#country').html(data.country);
|
||||
$('#city').html(data.city);
|
||||
$('#region').html(data.region);
|
||||
$('#timezone').html(data.timezone);
|
||||
$('#lat').html(data.latitude);
|
||||
$('#lon').html(data.longitude);
|
||||
$('#organization').html(data.organization);
|
||||
error: function () {
|
||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
|
||||
},
|
||||
timeout: 5000
|
||||
success: function (data) {
|
||||
if ('error' in data) {
|
||||
$('#ip_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
|
||||
} else {
|
||||
$('#continent').html(data.continent);
|
||||
$('#country').html(data.country);
|
||||
$('#region').html(data.region);
|
||||
$('#city').html(data.city);
|
||||
$('#postal_code').html(data.postal_code);
|
||||
$('#timezone').html(data.timezone);
|
||||
$('#latitude').html(data.latitude);
|
||||
$('#longitude').html(data.longitude);
|
||||
$('#accuracy').html(data.accuracy + ' km');
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getUserConnection(ip_address) {
|
||||
$.ajax({
|
||||
url: 'get_whois_lookup',
|
||||
type: 'GET',
|
||||
data: { ip_address: ip_address },
|
||||
cache: true,
|
||||
async: true,
|
||||
complete: function () {
|
||||
$('#isp_loading').remove();
|
||||
},
|
||||
error: function () {
|
||||
$('#isp_error').html('<i class="fa fa-exclamation-circle"></i> Internal request failed.').show();
|
||||
},
|
||||
success: function (data) {
|
||||
$('#isp_host').html(data.host);
|
||||
if ('error' in data) {
|
||||
$('#isp_error').html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
|
||||
} else if (data.nets.length) {
|
||||
$('#isp_instance').remove();
|
||||
$.each(data.nets, function (index, net) {
|
||||
var s = '';
|
||||
if (net.city || net.state || net.postal_code) {
|
||||
s = (net.city && net.state) ? net.city + ', ' + net.state : net.city || net.state || '';
|
||||
s = (s && net.postal_code) ? s + ' ' + net.postal_code : s || net.postal_code || '';
|
||||
}
|
||||
s = (s) ? '<strong>' + s + '</strong><br />' : s;
|
||||
$('#modal-text').append('<div class="col-sm-6"> \
|
||||
<ul class="list-unstyled"> \
|
||||
<li>ISP: <strong>' + net.description + '</strong></li> \
|
||||
<li><span style="float: left;">Address: </span> \
|
||||
<span style="float: left;"><strong>' + net.address + '</strong><br />' + s +
|
||||
'<strong>' + net.country + '</strong></span> \
|
||||
</li> \
|
||||
</ul> \
|
||||
</div>')
|
||||
});
|
||||
} else {
|
||||
$('#isp_name, #isp_address').html("Not available");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getUserLocation('${data}');
|
||||
getUserConnection('${data}');
|
||||
</script>
|
||||
% endif
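Both lookups above call internal endpoints (get_geoip_lookup and get_whois_lookup) and treat an 'error' key in an otherwise successful JSON response as an application-level failure. A minimal sketch of that convention with a hypothetical callback:

function handleLookup(data, $errorTarget, render) {
    // Even a 200 response may carry an error message in the payload.
    if ('error' in data) {
        $errorTarget.html('<i class="fa fa-exclamation-circle"></i> ' + data.error).show();
    } else {
        render(data);
    }
}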
|
28 lines  data/interfaces/default/js/jquery.qrcode.min.js (vendored, new file)
@@ -0,0 +1,28 @@
|
||||
(function(r){r.fn.qrcode=function(h){var s;function u(a){this.mode=s;this.data=a}function o(a,c){this.typeNumber=a;this.errorCorrectLevel=c;this.modules=null;this.moduleCount=0;this.dataCache=null;this.dataList=[]}function q(a,c){if(void 0==a.length)throw Error(a.length+"/"+c);for(var d=0;d<a.length&&0==a[d];)d++;this.num=Array(a.length-d+c);for(var b=0;b<a.length-d;b++)this.num[b]=a[b+d]}function p(a,c){this.totalCount=a;this.dataCount=c}function t(){this.buffer=[];this.length=0}u.prototype={getLength:function(){return this.data.length},
|
||||
write:function(a){for(var c=0;c<this.data.length;c++)a.put(this.data.charCodeAt(c),8)}};o.prototype={addData:function(a){this.dataList.push(new u(a));this.dataCache=null},isDark:function(a,c){if(0>a||this.moduleCount<=a||0>c||this.moduleCount<=c)throw Error(a+","+c);return this.modules[a][c]},getModuleCount:function(){return this.moduleCount},make:function(){if(1>this.typeNumber){for(var a=1,a=1;40>a;a++){for(var c=p.getRSBlocks(a,this.errorCorrectLevel),d=new t,b=0,e=0;e<c.length;e++)b+=c[e].dataCount;
|
||||
for(e=0;e<this.dataList.length;e++)c=this.dataList[e],d.put(c.mode,4),d.put(c.getLength(),j.getLengthInBits(c.mode,a)),c.write(d);if(d.getLengthInBits()<=8*b)break}this.typeNumber=a}this.makeImpl(!1,this.getBestMaskPattern())},makeImpl:function(a,c){this.moduleCount=4*this.typeNumber+17;this.modules=Array(this.moduleCount);for(var d=0;d<this.moduleCount;d++){this.modules[d]=Array(this.moduleCount);for(var b=0;b<this.moduleCount;b++)this.modules[d][b]=null}this.setupPositionProbePattern(0,0);this.setupPositionProbePattern(this.moduleCount-
|
||||
7,0);this.setupPositionProbePattern(0,this.moduleCount-7);this.setupPositionAdjustPattern();this.setupTimingPattern();this.setupTypeInfo(a,c);7<=this.typeNumber&&this.setupTypeNumber(a);null==this.dataCache&&(this.dataCache=o.createData(this.typeNumber,this.errorCorrectLevel,this.dataList));this.mapData(this.dataCache,c)},setupPositionProbePattern:function(a,c){for(var d=-1;7>=d;d++)if(!(-1>=a+d||this.moduleCount<=a+d))for(var b=-1;7>=b;b++)-1>=c+b||this.moduleCount<=c+b||(this.modules[a+d][c+b]=
|
||||
0<=d&&6>=d&&(0==b||6==b)||0<=b&&6>=b&&(0==d||6==d)||2<=d&&4>=d&&2<=b&&4>=b?!0:!1)},getBestMaskPattern:function(){for(var a=0,c=0,d=0;8>d;d++){this.makeImpl(!0,d);var b=j.getLostPoint(this);if(0==d||a>b)a=b,c=d}return c},createMovieClip:function(a,c,d){a=a.createEmptyMovieClip(c,d);this.make();for(c=0;c<this.modules.length;c++)for(var d=1*c,b=0;b<this.modules[c].length;b++){var e=1*b;this.modules[c][b]&&(a.beginFill(0,100),a.moveTo(e,d),a.lineTo(e+1,d),a.lineTo(e+1,d+1),a.lineTo(e,d+1),a.endFill())}return a},
|
||||
setupTimingPattern:function(){for(var a=8;a<this.moduleCount-8;a++)null==this.modules[a][6]&&(this.modules[a][6]=0==a%2);for(a=8;a<this.moduleCount-8;a++)null==this.modules[6][a]&&(this.modules[6][a]=0==a%2)},setupPositionAdjustPattern:function(){for(var a=j.getPatternPosition(this.typeNumber),c=0;c<a.length;c++)for(var d=0;d<a.length;d++){var b=a[c],e=a[d];if(null==this.modules[b][e])for(var f=-2;2>=f;f++)for(var i=-2;2>=i;i++)this.modules[b+f][e+i]=-2==f||2==f||-2==i||2==i||0==f&&0==i?!0:!1}},setupTypeNumber:function(a){for(var c=
|
||||
j.getBCHTypeNumber(this.typeNumber),d=0;18>d;d++){var b=!a&&1==(c>>d&1);this.modules[Math.floor(d/3)][d%3+this.moduleCount-8-3]=b}for(d=0;18>d;d++)b=!a&&1==(c>>d&1),this.modules[d%3+this.moduleCount-8-3][Math.floor(d/3)]=b},setupTypeInfo:function(a,c){for(var d=j.getBCHTypeInfo(this.errorCorrectLevel<<3|c),b=0;15>b;b++){var e=!a&&1==(d>>b&1);6>b?this.modules[b][8]=e:8>b?this.modules[b+1][8]=e:this.modules[this.moduleCount-15+b][8]=e}for(b=0;15>b;b++)e=!a&&1==(d>>b&1),8>b?this.modules[8][this.moduleCount-
|
||||
b-1]=e:9>b?this.modules[8][15-b-1+1]=e:this.modules[8][15-b-1]=e;this.modules[this.moduleCount-8][8]=!a},mapData:function(a,c){for(var d=-1,b=this.moduleCount-1,e=7,f=0,i=this.moduleCount-1;0<i;i-=2)for(6==i&&i--;;){for(var g=0;2>g;g++)if(null==this.modules[b][i-g]){var n=!1;f<a.length&&(n=1==(a[f]>>>e&1));j.getMask(c,b,i-g)&&(n=!n);this.modules[b][i-g]=n;e--; -1==e&&(f++,e=7)}b+=d;if(0>b||this.moduleCount<=b){b-=d;d=-d;break}}}};o.PAD0=236;o.PAD1=17;o.createData=function(a,c,d){for(var c=p.getRSBlocks(a,
|
||||
c),b=new t,e=0;e<d.length;e++){var f=d[e];b.put(f.mode,4);b.put(f.getLength(),j.getLengthInBits(f.mode,a));f.write(b)}for(e=a=0;e<c.length;e++)a+=c[e].dataCount;if(b.getLengthInBits()>8*a)throw Error("code length overflow. ("+b.getLengthInBits()+">"+8*a+")");for(b.getLengthInBits()+4<=8*a&&b.put(0,4);0!=b.getLengthInBits()%8;)b.putBit(!1);for(;!(b.getLengthInBits()>=8*a);){b.put(o.PAD0,8);if(b.getLengthInBits()>=8*a)break;b.put(o.PAD1,8)}return o.createBytes(b,c)};o.createBytes=function(a,c){for(var d=
|
||||
0,b=0,e=0,f=Array(c.length),i=Array(c.length),g=0;g<c.length;g++){var n=c[g].dataCount,h=c[g].totalCount-n,b=Math.max(b,n),e=Math.max(e,h);f[g]=Array(n);for(var k=0;k<f[g].length;k++)f[g][k]=255&a.buffer[k+d];d+=n;k=j.getErrorCorrectPolynomial(h);n=(new q(f[g],k.getLength()-1)).mod(k);i[g]=Array(k.getLength()-1);for(k=0;k<i[g].length;k++)h=k+n.getLength()-i[g].length,i[g][k]=0<=h?n.get(h):0}for(k=g=0;k<c.length;k++)g+=c[k].totalCount;d=Array(g);for(k=n=0;k<b;k++)for(g=0;g<c.length;g++)k<f[g].length&&
|
||||
(d[n++]=f[g][k]);for(k=0;k<e;k++)for(g=0;g<c.length;g++)k<i[g].length&&(d[n++]=i[g][k]);return d};s=4;for(var j={PATTERN_POSITION_TABLE:[[],[6,18],[6,22],[6,26],[6,30],[6,34],[6,22,38],[6,24,42],[6,26,46],[6,28,50],[6,30,54],[6,32,58],[6,34,62],[6,26,46,66],[6,26,48,70],[6,26,50,74],[6,30,54,78],[6,30,56,82],[6,30,58,86],[6,34,62,90],[6,28,50,72,94],[6,26,50,74,98],[6,30,54,78,102],[6,28,54,80,106],[6,32,58,84,110],[6,30,58,86,114],[6,34,62,90,118],[6,26,50,74,98,122],[6,30,54,78,102,126],[6,26,52,
|
||||
78,104,130],[6,30,56,82,108,134],[6,34,60,86,112,138],[6,30,58,86,114,142],[6,34,62,90,118,146],[6,30,54,78,102,126,150],[6,24,50,76,102,128,154],[6,28,54,80,106,132,158],[6,32,58,84,110,136,162],[6,26,54,82,110,138,166],[6,30,58,86,114,142,170]],G15:1335,G18:7973,G15_MASK:21522,getBCHTypeInfo:function(a){for(var c=a<<10;0<=j.getBCHDigit(c)-j.getBCHDigit(j.G15);)c^=j.G15<<j.getBCHDigit(c)-j.getBCHDigit(j.G15);return(a<<10|c)^j.G15_MASK},getBCHTypeNumber:function(a){for(var c=a<<12;0<=j.getBCHDigit(c)-
|
||||
j.getBCHDigit(j.G18);)c^=j.G18<<j.getBCHDigit(c)-j.getBCHDigit(j.G18);return a<<12|c},getBCHDigit:function(a){for(var c=0;0!=a;)c++,a>>>=1;return c},getPatternPosition:function(a){return j.PATTERN_POSITION_TABLE[a-1]},getMask:function(a,c,d){switch(a){case 0:return 0==(c+d)%2;case 1:return 0==c%2;case 2:return 0==d%3;case 3:return 0==(c+d)%3;case 4:return 0==(Math.floor(c/2)+Math.floor(d/3))%2;case 5:return 0==c*d%2+c*d%3;case 6:return 0==(c*d%2+c*d%3)%2;case 7:return 0==(c*d%3+(c+d)%2)%2;default:throw Error("bad maskPattern:"+
|
||||
a);}},getErrorCorrectPolynomial:function(a){for(var c=new q([1],0),d=0;d<a;d++)c=c.multiply(new q([1,l.gexp(d)],0));return c},getLengthInBits:function(a,c){if(1<=c&&10>c)switch(a){case 1:return 10;case 2:return 9;case s:return 8;case 8:return 8;default:throw Error("mode:"+a);}else if(27>c)switch(a){case 1:return 12;case 2:return 11;case s:return 16;case 8:return 10;default:throw Error("mode:"+a);}else if(41>c)switch(a){case 1:return 14;case 2:return 13;case s:return 16;case 8:return 12;default:throw Error("mode:"+
|
||||
a);}else throw Error("type:"+c);},getLostPoint:function(a){for(var c=a.getModuleCount(),d=0,b=0;b<c;b++)for(var e=0;e<c;e++){for(var f=0,i=a.isDark(b,e),g=-1;1>=g;g++)if(!(0>b+g||c<=b+g))for(var h=-1;1>=h;h++)0>e+h||c<=e+h||0==g&&0==h||i==a.isDark(b+g,e+h)&&f++;5<f&&(d+=3+f-5)}for(b=0;b<c-1;b++)for(e=0;e<c-1;e++)if(f=0,a.isDark(b,e)&&f++,a.isDark(b+1,e)&&f++,a.isDark(b,e+1)&&f++,a.isDark(b+1,e+1)&&f++,0==f||4==f)d+=3;for(b=0;b<c;b++)for(e=0;e<c-6;e++)a.isDark(b,e)&&!a.isDark(b,e+1)&&a.isDark(b,e+
|
||||
2)&&a.isDark(b,e+3)&&a.isDark(b,e+4)&&!a.isDark(b,e+5)&&a.isDark(b,e+6)&&(d+=40);for(e=0;e<c;e++)for(b=0;b<c-6;b++)a.isDark(b,e)&&!a.isDark(b+1,e)&&a.isDark(b+2,e)&&a.isDark(b+3,e)&&a.isDark(b+4,e)&&!a.isDark(b+5,e)&&a.isDark(b+6,e)&&(d+=40);for(e=f=0;e<c;e++)for(b=0;b<c;b++)a.isDark(b,e)&&f++;a=Math.abs(100*f/c/c-50)/5;return d+10*a}},l={glog:function(a){if(1>a)throw Error("glog("+a+")");return l.LOG_TABLE[a]},gexp:function(a){for(;0>a;)a+=255;for(;256<=a;)a-=255;return l.EXP_TABLE[a]},EXP_TABLE:Array(256),
|
||||
LOG_TABLE:Array(256)},m=0;8>m;m++)l.EXP_TABLE[m]=1<<m;for(m=8;256>m;m++)l.EXP_TABLE[m]=l.EXP_TABLE[m-4]^l.EXP_TABLE[m-5]^l.EXP_TABLE[m-6]^l.EXP_TABLE[m-8];for(m=0;255>m;m++)l.LOG_TABLE[l.EXP_TABLE[m]]=m;q.prototype={get:function(a){return this.num[a]},getLength:function(){return this.num.length},multiply:function(a){for(var c=Array(this.getLength()+a.getLength()-1),d=0;d<this.getLength();d++)for(var b=0;b<a.getLength();b++)c[d+b]^=l.gexp(l.glog(this.get(d))+l.glog(a.get(b)));return new q(c,0)},mod:function(a){if(0>
|
||||
this.getLength()-a.getLength())return this;for(var c=l.glog(this.get(0))-l.glog(a.get(0)),d=Array(this.getLength()),b=0;b<this.getLength();b++)d[b]=this.get(b);for(b=0;b<a.getLength();b++)d[b]^=l.gexp(l.glog(a.get(b))+c);return(new q(d,0)).mod(a)}};p.RS_BLOCK_TABLE=[[1,26,19],[1,26,16],[1,26,13],[1,26,9],[1,44,34],[1,44,28],[1,44,22],[1,44,16],[1,70,55],[1,70,44],[2,35,17],[2,35,13],[1,100,80],[2,50,32],[2,50,24],[4,25,9],[1,134,108],[2,67,43],[2,33,15,2,34,16],[2,33,11,2,34,12],[2,86,68],[4,43,27],
|
||||
[4,43,19],[4,43,15],[2,98,78],[4,49,31],[2,32,14,4,33,15],[4,39,13,1,40,14],[2,121,97],[2,60,38,2,61,39],[4,40,18,2,41,19],[4,40,14,2,41,15],[2,146,116],[3,58,36,2,59,37],[4,36,16,4,37,17],[4,36,12,4,37,13],[2,86,68,2,87,69],[4,69,43,1,70,44],[6,43,19,2,44,20],[6,43,15,2,44,16],[4,101,81],[1,80,50,4,81,51],[4,50,22,4,51,23],[3,36,12,8,37,13],[2,116,92,2,117,93],[6,58,36,2,59,37],[4,46,20,6,47,21],[7,42,14,4,43,15],[4,133,107],[8,59,37,1,60,38],[8,44,20,4,45,21],[12,33,11,4,34,12],[3,145,115,1,146,
|
||||
116],[4,64,40,5,65,41],[11,36,16,5,37,17],[11,36,12,5,37,13],[5,109,87,1,110,88],[5,65,41,5,66,42],[5,54,24,7,55,25],[11,36,12],[5,122,98,1,123,99],[7,73,45,3,74,46],[15,43,19,2,44,20],[3,45,15,13,46,16],[1,135,107,5,136,108],[10,74,46,1,75,47],[1,50,22,15,51,23],[2,42,14,17,43,15],[5,150,120,1,151,121],[9,69,43,4,70,44],[17,50,22,1,51,23],[2,42,14,19,43,15],[3,141,113,4,142,114],[3,70,44,11,71,45],[17,47,21,4,48,22],[9,39,13,16,40,14],[3,135,107,5,136,108],[3,67,41,13,68,42],[15,54,24,5,55,25],[15,
|
||||
43,15,10,44,16],[4,144,116,4,145,117],[17,68,42],[17,50,22,6,51,23],[19,46,16,6,47,17],[2,139,111,7,140,112],[17,74,46],[7,54,24,16,55,25],[34,37,13],[4,151,121,5,152,122],[4,75,47,14,76,48],[11,54,24,14,55,25],[16,45,15,14,46,16],[6,147,117,4,148,118],[6,73,45,14,74,46],[11,54,24,16,55,25],[30,46,16,2,47,17],[8,132,106,4,133,107],[8,75,47,13,76,48],[7,54,24,22,55,25],[22,45,15,13,46,16],[10,142,114,2,143,115],[19,74,46,4,75,47],[28,50,22,6,51,23],[33,46,16,4,47,17],[8,152,122,4,153,123],[22,73,45,
|
||||
3,74,46],[8,53,23,26,54,24],[12,45,15,28,46,16],[3,147,117,10,148,118],[3,73,45,23,74,46],[4,54,24,31,55,25],[11,45,15,31,46,16],[7,146,116,7,147,117],[21,73,45,7,74,46],[1,53,23,37,54,24],[19,45,15,26,46,16],[5,145,115,10,146,116],[19,75,47,10,76,48],[15,54,24,25,55,25],[23,45,15,25,46,16],[13,145,115,3,146,116],[2,74,46,29,75,47],[42,54,24,1,55,25],[23,45,15,28,46,16],[17,145,115],[10,74,46,23,75,47],[10,54,24,35,55,25],[19,45,15,35,46,16],[17,145,115,1,146,116],[14,74,46,21,75,47],[29,54,24,19,
|
||||
55,25],[11,45,15,46,46,16],[13,145,115,6,146,116],[14,74,46,23,75,47],[44,54,24,7,55,25],[59,46,16,1,47,17],[12,151,121,7,152,122],[12,75,47,26,76,48],[39,54,24,14,55,25],[22,45,15,41,46,16],[6,151,121,14,152,122],[6,75,47,34,76,48],[46,54,24,10,55,25],[2,45,15,64,46,16],[17,152,122,4,153,123],[29,74,46,14,75,47],[49,54,24,10,55,25],[24,45,15,46,46,16],[4,152,122,18,153,123],[13,74,46,32,75,47],[48,54,24,14,55,25],[42,45,15,32,46,16],[20,147,117,4,148,118],[40,75,47,7,76,48],[43,54,24,22,55,25],[10,
|
||||
45,15,67,46,16],[19,148,118,6,149,119],[18,75,47,31,76,48],[34,54,24,34,55,25],[20,45,15,61,46,16]];p.getRSBlocks=function(a,c){var d=p.getRsBlockTable(a,c);if(void 0==d)throw Error("bad rs block @ typeNumber:"+a+"/errorCorrectLevel:"+c);for(var b=d.length/3,e=[],f=0;f<b;f++)for(var h=d[3*f+0],g=d[3*f+1],j=d[3*f+2],l=0;l<h;l++)e.push(new p(g,j));return e};p.getRsBlockTable=function(a,c){switch(c){case 1:return p.RS_BLOCK_TABLE[4*(a-1)+0];case 0:return p.RS_BLOCK_TABLE[4*(a-1)+1];case 3:return p.RS_BLOCK_TABLE[4*
|
||||
(a-1)+2];case 2:return p.RS_BLOCK_TABLE[4*(a-1)+3]}};t.prototype={get:function(a){return 1==(this.buffer[Math.floor(a/8)]>>>7-a%8&1)},put:function(a,c){for(var d=0;d<c;d++)this.putBit(1==(a>>>c-d-1&1))},getLengthInBits:function(){return this.length},putBit:function(a){var c=Math.floor(this.length/8);this.buffer.length<=c&&this.buffer.push(0);a&&(this.buffer[c]|=128>>>this.length%8);this.length++}};"string"===typeof h&&(h={text:h});h=r.extend({},{render:"canvas",width:256,height:256,typeNumber:-1,
|
||||
correctLevel:2,background:"#ffffff",foreground:"#000000"},h);return this.each(function(){var a;if("canvas"==h.render){a=new o(h.typeNumber,h.correctLevel);a.addData(h.text);a.make();var c=document.createElement("canvas");c.width=h.width;c.height=h.height;for(var d=c.getContext("2d"),b=h.width/a.getModuleCount(),e=h.height/a.getModuleCount(),f=0;f<a.getModuleCount();f++)for(var i=0;i<a.getModuleCount();i++){d.fillStyle=a.isDark(f,i)?h.foreground:h.background;var g=Math.ceil((i+1)*b)-Math.floor(i*b),
|
||||
j=Math.ceil((f+1)*b)-Math.floor(f*b);d.fillRect(Math.round(i*b),Math.round(f*e),g,j)}}else{a=new o(h.typeNumber,h.correctLevel);a.addData(h.text);a.make();c=r("<table></table>").css("width",h.width+"px").css("height",h.height+"px").css("border","0px").css("border-collapse","collapse").css("background-color",h.background);d=h.width/a.getModuleCount();b=h.height/a.getModuleCount();for(e=0;e<a.getModuleCount();e++){f=r("<tr></tr>").css("height",b+"px").appendTo(c);for(i=0;i<a.getModuleCount();i++)r("<td></td>").css("width",
|
||||
d+"px").css("background-color",a.isDark(e,i)?h.foreground:h.background).appendTo(f)}}a=c;jQuery(a).appendTo(this)})}})(jQuery);
|
@@ -54,9 +54,35 @@ function showMsg(msg, loader, timeout, ms, error) {
|
||||
}
|
||||
}
|
||||
|
||||
function doAjaxCall(url, elem, reload, form, callback) {
|
||||
function confirmAjaxCall(url, msg, loader_msg, callback) {
|
||||
$("#confirm-message").html(msg);
|
||||
$('#confirm-modal').modal();
|
||||
$('#confirm-modal').one('click', '#confirm-button', function () {
|
||||
if (loader_msg) {
|
||||
showMsg(loader_msg, true, false)
|
||||
}
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: 'POST',
|
||||
complete: function (xhr, status) {
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
showMsg('<i class="fa fa-check"></i> ' + msg, false, true, 5000)
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> ' + msg, false, true, 5000, true)
|
||||
}
|
||||
if (typeof callback === "function") {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
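confirmAjaxCall wires the shared confirmation modal to a POST request and reports the JSON result through showMsg. A usage sketch; the endpoint and callback body are hypothetical:

confirmAjaxCall(
    'delete_history_rows?row_id=42', // placeholder endpoint and parameters
    'Are you sure you want to delete this history item?',
    'Deleting history...',
    function () {
        // e.g. redraw a table once the request completes
    }
);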
|
||||
|
||||
function doAjaxCall(url, elem, reload, form, showMsg, callback) {
|
||||
// Set Message
|
||||
feedback = $("#ajaxMsg");
|
||||
feedback = (showMsg) ? $("#ajaxMsg") : $();
|
||||
update = $("#updatebar");
|
||||
if (update.is(":visible")) {
|
||||
var height = update.height() + 35;
|
||||
@@ -230,6 +256,10 @@ function getPlatformImagePath(platformName) {
|
||||
return 'images/platforms/wp.png';
|
||||
} else if (platformName.indexOf("Plex Media Player") > -1) {
|
||||
return 'images/platforms/pmp.png';
|
||||
} else if (platformName.indexOf("PlexTogether") > -1) {
|
||||
return 'images/platforms/plextogether.png';
|
||||
} else if (platformName.indexOf("Linux") > -1) {
|
||||
return 'images/platforms/linux.png';
|
||||
} else {
|
||||
return 'images/platforms/default.png';
|
||||
}
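This hunk only inserts a PlexTogether branch into a long if/else chain. Purely as an illustration (not part of the change), the same lookup could be table-driven:

// Hypothetical table-driven lookup: the first matching substring wins.
var platformIcons = [
    ['Plex Media Player', 'images/platforms/pmp.png'],
    ['PlexTogether', 'images/platforms/plextogether.png'],
    ['Linux', 'images/platforms/linux.png']
];
function lookupPlatformImage(platformName) {
    for (var i = 0; i < platformIcons.length; i++) {
        if (platformName.indexOf(platformIcons[i][0]) > -1) {
            return platformIcons[i][1];
        }
    }
    return 'images/platforms/default.png';
}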
|
||||
@@ -399,4 +429,44 @@ window.onerror = function (message, file, line) {
|
||||
'line': line
|
||||
};
|
||||
$.post("log_js_errors", e, function (data) { });
|
||||
};
|
||||
};
|
||||
|
||||
$('*').on('click', '.refresh_pms_image', function (e) {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
var background_div = $(this).parent().siblings('[style*=pms_image_proxy]').first();
|
||||
var pms_proxy_url = background_div.css('background-image');
|
||||
pms_proxy_url = /^url\((['"]?)(.*)\1\)$/.exec(pms_proxy_url);
|
||||
pms_proxy_url = pms_proxy_url ? pms_proxy_url[2] : ""; // If matched, retrieve url, otherwise ""
|
||||
|
||||
if (pms_proxy_url.indexOf('pms_image_proxy') == -1) {
|
||||
console.log('PMS image proxy url not found.');
|
||||
} else {
|
||||
if (pms_proxy_url.indexOf('refresh=true') > -1) {
|
||||
pms_proxy_url = pms_proxy_url.replace("&refresh=true", "");
|
||||
console.log(pms_proxy_url)
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + ')');
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
|
||||
} else {
|
||||
background_div.css('background-image', 'url(' + pms_proxy_url + '&refresh=true)');
|
||||
}
|
||||
}
|
||||
});
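(For reference, the regex above strips the CSS url() wrapper; with a made-up value:
    /^url\((['"]?)(.*)\1\)$/.exec('url("pms_image_proxy?img=/library/metadata/1/thumb&width=500")')
    // -> match[1] === '"', match[2] === 'pms_image_proxy?img=/library/metadata/1/thumb&width=500'
so match[2] is the bare proxy URL that the handler then re-requests with &refresh=true appended.)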
// Taken from http://stackoverflow.com/questions/10420352/converting-file-size-in-bytes-to-human-readable#answer-14919494
|
||||
function humanFileSize(bytes, si) {
|
||||
var thresh = si ? 1000 : 1024;
|
||||
if (Math.abs(bytes) < thresh) {
|
||||
return bytes + ' B';
|
||||
}
|
||||
var units = si
|
||||
? ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
|
||||
: ['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];
|
||||
var u = -1;
|
||||
do {
|
||||
bytes /= thresh;
|
||||
++u;
|
||||
} while (Math.abs(bytes) >= thresh && u < units.length - 1);
|
||||
return bytes.toFixed(1) + ' ' + units[u];
|
||||
}
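(A quick sanity check of the helper above, with a byte count chosen only for illustration:
    humanFileSize(1551859712, true)   // -> '1.6 GB'  (SI units, divides by 1000)
    humanFileSize(1551859712, false)  // -> '1.4 GiB' (binary units, divides by 1024)
)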
@@ -35,7 +35,11 @@ history_table_options = {
|
||||
"targets": [0],
|
||||
"data": null,
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
|
||||
if (rowData['id'] === null) {
|
||||
$(td).html('');
|
||||
} else {
|
||||
$(td).html('<button class="btn btn-xs btn-warning" data-id="' + rowData['id'] + '"><i class="fa fa-trash-o fa-fw"></i> Delete</button>');
|
||||
}
|
||||
},
|
||||
"width": "5%",
|
||||
"className": "delete-control no-wrap hidden",
|
||||
@@ -46,14 +50,21 @@ history_table_options = {
|
||||
"targets": [1],
|
||||
"data":"date",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (rowData['stopped'] === null) {
|
||||
$(td).html('Currently watching...');
|
||||
var date = moment(cellData, "X").format(date_format);
|
||||
if (rowData['state'] !== null) {
|
||||
var state = '';
|
||||
if (rowData['state'] === 'playing') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Playing"><i class="fa fa-play fa-fw"></i></span>';
|
||||
} else if (rowData['state'] === 'paused') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Paused"><i class="fa fa-pause fa-fw"></i></span>';
|
||||
} else if (rowData['state'] === 'buffering') {
|
||||
state = '<span class="current-activity-tooltip" data-toggle="tooltip" title="Currently Buffering"><i class="fa fa-spinner fa-fw"></i></span>';
|
||||
}
|
||||
$(td).html('<div><div style="float: left;">' + state + ' ' + date + '</div></div>');
|
||||
} else if (rowData['group_count'] > 1) {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
expand_history = '<span class="expand-history-tooltip" data-toggle="tooltip" title="Show Detailed History"><i class="fa fa-plus-circle fa-fw"></i></span>';
|
||||
$(td).html('<div><a href="#"><div style="float: left;">' + expand_history + ' ' + date + '</div></a></div>');
|
||||
} else {
|
||||
date = moment(cellData, "X").format(date_format);
|
||||
$(td).html('<div style="float: left;"><i class="fa fa-fw"></i> ' + date + '</div>');
|
||||
}
|
||||
},
|
||||
@@ -138,21 +149,22 @@ history_table_options = {
|
||||
var parent_info = '';
|
||||
var media_type = '';
|
||||
var thumb_popover = '';
|
||||
var source = (rowData['state'] === null) ? 'source=history&' : '';
|
||||
if (rowData['media_type'] === 'movie') {
|
||||
if (rowData['year']) { parent_info = ' (' + rowData['year'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Movie"><i class="fa fa-film fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'episode') {
|
||||
if (rowData['parent_media_index'] && rowData['media_index']) { parent_info = ' (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Episode"><i class="fa fa-television fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=450&fallback=poster" data-height="120" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;" >' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else if (rowData['media_type'] === 'track') {
|
||||
if (rowData['parent_title']) { parent_info = ' (' + rowData['parent_title'] + ')'; }
|
||||
media_type = '<span class="media-type-tooltip" data-toggle="tooltip" title="Track"><i class="fa fa-music fa-fw"></i></span>';
|
||||
thumb_popover = '<span class="thumb-tooltip" data-toggle="popover" data-img="pms_image_proxy?img=' + rowData['thumb'] + '&width=300&height=300&fallback=cover" data-height="80" data-width="80">' + cellData + parent_info + '</span>'
|
||||
$(td).html('<div class="history-title"><a href="info?source=history&rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
$(td).html('<div class="history-title"><a href="info?' + source + 'rating_key=' + rowData['rating_key'] + '"><div style="float: left;">' + media_type + ' ' + thumb_popover + '</div></a></div>');
|
||||
} else {
|
||||
$(td).html('<a href="info?rating_key=' + rowData['rating_key'] + '">' + cellData + '</a>');
|
||||
}
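(Effect of the new source variable above: rows for streams that are still active, i.e. rowData['state'] !== null, now link to info?rating_key=... with no source parameter, while completed history rows keep linking to info?source=history&rating_key=... as before.)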
@@ -241,6 +253,7 @@ history_table_options = {
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('.current-activity-tooltip').tooltip({ container: 'body' });
|
||||
$('.expand-history-tooltip').tooltip({ container: 'body' });
|
||||
$('.external-ip-tooltip').tooltip({ container: 'body' });
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
@@ -286,7 +299,7 @@ history_table_options = {
|
||||
if ($.inArray(rowData['id'], history_to_delete) !== -1) {
|
||||
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
|
||||
}
|
||||
} else {
|
||||
} else if (rowData['id'] !== null) {
|
||||
// if grouped rows
|
||||
// toggle the parent button to danger
|
||||
$(row).find('button[data-id="' + rowData['id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger');
|
||||
@@ -306,6 +319,9 @@ history_table_options = {
|
||||
history_table.row(row).child(childTableFormat(rowData)).show();
|
||||
}
|
||||
|
||||
if (rowData['state'] !== null) {
|
||||
$(row).addClass('current-activity-row');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,7 +334,11 @@ $('.history_table').on('click', '> tbody > tr > td.modal-control', function () {
|
||||
function showStreamDetails() {
|
||||
$.ajax({
|
||||
url: 'get_stream_data',
|
||||
data: {row_id: rowData['id'], user: rowData['friendly_name']},
|
||||
data: {
|
||||
row_id: rowData['id'],
|
||||
session_key: rowData['session_key'],
|
||||
user: rowData['friendly_name']
|
||||
},
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function(xhr, status) {
|
||||
@@ -448,10 +468,10 @@ function childTableOptions(rowData) {
|
||||
|
||||
// Create the tooltips.
|
||||
$('.expand-history-tooltip').tooltip({ container: 'body' });
|
||||
$('.external-ip-tooltip').tooltip();
|
||||
$('.transcode-tooltip').tooltip();
|
||||
$('.media-type-tooltip').tooltip();
|
||||
$('.watched-tooltip').tooltip();
|
||||
$('.external-ip-tooltip').tooltip({ container: 'body' });
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
$('.media-type-tooltip').tooltip({ container: 'body' });
|
||||
$('.watched-tooltip').tooltip({ container: 'body' });
|
||||
$('.thumb-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
|
@@ -132,8 +132,8 @@ history_table_modal_options = {
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('.transcode-tooltip').tooltip();
|
||||
$('.media-type-tooltip').tooltip();
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
$('.media-type-tooltip').tooltip({ container: 'body' });
|
||||
$('.thumb-tooltip').popover({
|
||||
html: true,
|
||||
container: '#history-modal',
|
||||
|
@@ -217,10 +217,10 @@ libraries_list_table_options = {
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('.purge-tooltip').tooltip();
|
||||
$('.edit-tooltip').tooltip();
|
||||
$('.transcode-tooltip').tooltip();
|
||||
$('.media-type-tooltip').tooltip();
|
||||
$('.purge-tooltip').tooltip({ container: 'body' });
|
||||
$('.edit-tooltip').tooltip({ container: 'body' });
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
$('.media-type-tooltip').tooltip({ container: 'body' });
|
||||
$('.thumb-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
|
@@ -4,7 +4,7 @@ var time_format = 'hh:mm a';
|
||||
$.ajax({
|
||||
url: 'get_date_formats',
|
||||
type: 'GET',
|
||||
success: function(data) {
|
||||
success: function (data) {
|
||||
date_format = data.date_format;
|
||||
time_format = data.time_format;
|
||||
}
|
||||
@@ -16,10 +16,10 @@ media_info_table_options = {
|
||||
"destroy": true,
|
||||
"language": {
|
||||
"search": "Search: ",
|
||||
"lengthMenu":"Show _MENU_ entries per page",
|
||||
"info":"Showing _START_ to _END_ of _TOTAL_ library items",
|
||||
"infoEmpty":"Showing 0 to 0 of 0 entries",
|
||||
"infoFiltered":"<span class='hidden-md hidden-sm hidden-xs'>(filtered from _MAX_ total entries)</span>",
|
||||
"lengthMenu": "Show _MENU_ entries per page",
|
||||
"info": "Showing _START_ to _END_ of _TOTAL_ library items",
|
||||
"infoEmpty": "Showing 0 to 0 of 0 entries",
|
||||
"infoFiltered": "<span class='hidden-md hidden-sm hidden-xs'>(filtered from _MAX_ total entries)</span>",
|
||||
"emptyTable": "No data in table",
|
||||
"loadingRecords": '<i class="fa fa-refresh fa-spin"></i> Loading items...</div>'
|
||||
},
|
||||
@@ -28,7 +28,7 @@ media_info_table_options = {
|
||||
"processing": false,
|
||||
"serverSide": true,
|
||||
"pageLength": 25,
|
||||
"order": [ 1, 'asc'],
|
||||
"order": [1, 'asc'],
|
||||
"autoWidth": false,
|
||||
"scrollX": true,
|
||||
"columnDefs": [
|
||||
@@ -110,7 +110,7 @@ media_info_table_options = {
|
||||
},
|
||||
"width": "20%",
|
||||
"className": "no-wrap",
|
||||
},
|
||||
},
|
||||
{
|
||||
"targets": [2],
|
||||
"data": "container",
|
||||
@@ -194,7 +194,7 @@ media_info_table_options = {
|
||||
"data": "file_size",
|
||||
"createdCell": function (td, cellData, rowData, row, col) {
|
||||
if (cellData !== null && cellData !== '') {
|
||||
$(td).html(Math.round(cellData / Math.pow(1024, 2)).toString() + ' MiB');
|
||||
$(td).html(humanFileSize(cellData));
|
||||
} else {
|
||||
if (rowData['section_type'] != 'photo' && get_file_sizes != null) {
|
||||
get_file_sizes = true;
|
||||
@@ -280,10 +280,10 @@ media_info_table_options = {
|
||||
}
|
||||
|
||||
$("#media_info_table-SID-" + section_id + "_info").append('<span class="hidden-md hidden-sm hidden-xs"> with a total file size of ' +
|
||||
Math.round(settings.json.filtered_file_size / Math.pow(1024, 3)).toString() + ' GiB' +
|
||||
' (filtered from ' + Math.round(settings.json.total_file_size / Math.pow(1024, 3)).toString() + ' GiB)</span>');
|
||||
humanFileSize(settings.json.filtered_file_size) +
|
||||
' (filtered from ' + humanFileSize(settings.json.total_file_size) + ')</span>');
|
||||
},
|
||||
"preDrawCallback": function(settings) {
|
||||
"preDrawCallback": function (settings) {
|
||||
var msg = "<i class='fa fa-refresh fa-spin'></i> Fetching rows...";
|
||||
showMsg(msg, false, false, 0)
|
||||
},
|
||||
@@ -425,17 +425,17 @@ function childTableFormatMedia(rowData) {
|
||||
'<table id="media_info_child-' + rowData['rating_key'] + '" data-id="' + rowData['rating_key'] + '" width="100%">' +
|
||||
'<thead>' +
|
||||
'<tr>' +
|
||||
'<th align="left" id="added_at">Added At</th>' +
|
||||
'<th align="left" id="title">Title</th>' +
|
||||
'<th align="left" id="container">Container</th>' +
|
||||
'<th align="left" id="bitrate">Bitrate</th>' +
|
||||
'<th align="left" id="video_codec">Video Codec</th>' +
|
||||
'<th align="left" id="video_resolution">Video Resolution</th>' +
|
||||
'<th align="left" id="video_resolution">Video Framerate</th>' +
|
||||
'<th align="left" id="audio_codec">Audio Codec</th>' +
|
||||
'<th align="left" id="audio_channels">Audio Channels</th>' +
|
||||
'<th align="left" id="file_size">File Size</th>' +
|
||||
'<th align="left" id="last_played">Last Played</th>' +
|
||||
'<th align="left" id="added_at">Added At</th>' +
|
||||
'<th align="left" id="title">Title</th>' +
|
||||
'<th align="left" id="container">Container</th>' +
|
||||
'<th align="left" id="bitrate">Bitrate</th>' +
|
||||
'<th align="left" id="video_codec">Video Codec</th>' +
|
||||
'<th align="left" id="video_resolution">Video Resolution</th>' +
|
||||
'<th align="left" id="video_resolution">Video Framerate</th>' +
|
||||
'<th align="left" id="audio_codec">Audio Codec</th>' +
|
||||
'<th align="left" id="audio_channels">Audio Channels</th>' +
|
||||
'<th align="left" id="file_size">File Size</th>' +
|
||||
'<th align="left" id="last_played">Last Played</th>' +
|
||||
'<th align="left" id="total_plays">Total Plays</th>' +
|
||||
'</tr>' +
|
||||
'</thead>' +
|
||||
|
@@ -220,13 +220,14 @@ users_list_table_options = {
|
||||
$('#ajaxMsg').fadeOut();
|
||||
|
||||
// Create the tooltips.
|
||||
$('.purge-tooltip').tooltip();
|
||||
$('.edit-tooltip').tooltip();
|
||||
$('.transcode-tooltip').tooltip();
|
||||
$('.media-type-tooltip').tooltip();
|
||||
$('.watched-tooltip').tooltip();
|
||||
$('.purge-tooltip').tooltip({ container: 'body' });
|
||||
$('.edit-tooltip').tooltip({ container: 'body' });
|
||||
$('.transcode-tooltip').tooltip({ container: 'body' });
|
||||
$('.media-type-tooltip').tooltip({ container: 'body' });
|
||||
$('.watched-tooltip').tooltip({ container: 'body' });
|
||||
$('.thumb-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
trigger: 'hover',
|
||||
placement: 'right',
|
||||
template: '<div class="popover history-thumbnail-popover" role="tooltip"><div class="arrow" style="top: 50%;"></div><div class="popover-content"></div></div>',
|
||||
|
@@ -39,6 +39,9 @@ DOCUMENTATION :: END
|
||||
<div class="row">
|
||||
% if data['library_art']:
|
||||
<div class="art-face" style="background-image:url(pms_image_proxy?img=${data['library_art']}&width=1920&height=1080)"></div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image info-art" title="Refresh background image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
% endif
|
||||
<div class="summary-container">
|
||||
<div class="summary-navbar">
|
||||
@@ -362,7 +365,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_library_watch_time_stats',
|
||||
url: 'library_watch_time_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -372,7 +375,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate user stats
|
||||
$.ajax({
|
||||
url: 'get_library_user_stats',
|
||||
url: 'library_user_stats',
|
||||
async: true,
|
||||
data: { section_id: section_id },
|
||||
complete: function(xhr, status) {
|
||||
@@ -498,7 +501,7 @@ DOCUMENTATION :: END
|
||||
function recentlyWatched() {
|
||||
// Populate recently watched
|
||||
$.ajax({
|
||||
url: 'get_library_recently_watched',
|
||||
url: 'library_recently_watched',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
@@ -514,7 +517,7 @@ DOCUMENTATION :: END
|
||||
function recentlyAdded() {
|
||||
// Populate recently added
|
||||
$.ajax({
|
||||
url: 'get_library_recently_added',
|
||||
url: 'library_recently_added',
|
||||
async: true,
|
||||
data: {
|
||||
section_id: section_id,
|
||||
|
@@ -60,6 +60,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -21,7 +21,33 @@
|
||||
<span><i class="fa fa-list-alt"></i> Logs</span>
|
||||
</div>
|
||||
<div class="button-bar">
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download log</button>
|
||||
<div class="btn-group" id="plexpy-log-levels">
|
||||
<label>
|
||||
<select name="plexpy-log-level-filter" id="plexpy-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARNING">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<div class="btn-group" id="plex-log-levels" style="display: none;">
|
||||
<label>
|
||||
<select name="plex-log-level-filter" id="plex-log-level-filter" class="btn" style="color: inherit;">
|
||||
<option value="">All log levels</option>
|
||||
<option disabled>────────────</option>
|
||||
<option value="DEBUG">Debug</option>
|
||||
<option value="INFO">Info</option>
|
||||
<option value="WARN">Warning</option>
|
||||
<option value="ERROR">Error</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<button class="btn btn-dark" id="download-plexpylog"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexserverlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="download-plexscannerlog" style="display: none;"><i class="fa fa-download"></i> Download logs</button>
|
||||
<button class="btn btn-dark" id="clear-logs"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-notify-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
<button class="btn btn-dark" id="clear-login-logs" style="display: none;"><i class="fa fa-trash-o"></i> Clear logs</button>
|
||||
@@ -40,27 +66,25 @@
|
||||
<div role="tabpanel" class="tab-pane active" id="tabs-1">
|
||||
<table class="display" id="log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="min-tablet" align="left" id="timestamp">Timestamp</th>
|
||||
<th class="desktop" align="left" id="level">Level</th>
|
||||
<th class="all" align="left" id="message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-2">
|
||||
<table class="display" id="plex_log_table" width="100%">
|
||||
<thead>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th align="left" id="plex_timestamp">Timestamp</th>
|
||||
<th align="left" id="plex_level">Level</th>
|
||||
<th align="left" id="plex_message">Message</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-3">
|
||||
@@ -114,7 +138,8 @@
|
||||
</div>
|
||||
|
||||
<br>
|
||||
<div align="center">Refresh rate:
|
||||
<div align="center">
|
||||
Refresh rate:
|
||||
<select id="refreshrate" onchange="setRefresh()">
|
||||
<option value="0" selected="selected">No Refresh</option>
|
||||
<option value="5">5 Seconds</option>
|
||||
@@ -139,21 +164,62 @@
|
||||
<script>
|
||||
|
||||
$(document).ready(function() {
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
function loadPlexPyLogs() {
|
||||
var log_levels = ['DEBUG', 'INFO', 'WARN', 'ERROR'];
|
||||
|
||||
function bindLogLevelFilter() {
|
||||
clearLogLevelFilter();
|
||||
var log_level_column = this.api().column(1);
|
||||
var select = $('#plex-log-level-filter');
|
||||
select.on('change', function () {
|
||||
var val = $.fn.dataTable.util.escapeRegex(
|
||||
$(this).val()
|
||||
);
|
||||
var search_string = '';
|
||||
var levelIndex = log_levels.indexOf(val);
|
||||
if (levelIndex >= 0) {
|
||||
search_string = '^' + log_levels
|
||||
.slice(levelIndex)
|
||||
.join('|') + '$';
|
||||
}
|
||||
log_level_column
|
||||
.search(search_string, true, false)
|
||||
.draw();
|
||||
}).change();
|
||||
}
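(Worked example of the filter string built above: with log_levels = ['DEBUG', 'INFO', 'WARN', 'ERROR'], selecting 'WARN' gives
    log_levels.slice(2).join('|')  // -> 'WARN|ERROR'
    search_string                  // -> '^WARN|ERROR$'
which DataTables applies as a regex search on the Level column, so warnings and errors stay visible together.)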
function clearLogLevelFilter() {
|
||||
$('#plex-log-level-filter').off('change');
|
||||
}
|
||||
|
||||
var selected_log_level = null;
|
||||
function loadPlexPyLogs(selected_log_level) {
|
||||
log_table_options.ajax = {
|
||||
url: "getLog"
|
||||
url: "get_log",
|
||||
type: 'post',
|
||||
data: function (d) {
|
||||
return {
|
||||
json_data: JSON.stringify(d),
|
||||
log_level: selected_log_level
|
||||
};
|
||||
}
|
||||
}
|
||||
log_table = $('#log_table').DataTable(log_table_options);
|
||||
|
||||
$('#plexpy-log-level-filter').on('change', function () {
|
||||
selected_log_level = $(this).val() || null;
|
||||
log_table.draw();
|
||||
});
|
||||
}
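(Rough shape of the request this sends to get_log; the DataTables draw object d is abbreviated and the values are illustrative only:
    // POST get_log
    // json_data:  JSON.stringify(d)   e.g. '{"draw":1,"start":0,"length":50, ...}'
    // log_level:  'ERROR'             or null when "All log levels" is selected
)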
function loadPlexLogs() {
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=server"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_log_table = $('#plex_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -161,6 +227,7 @@
|
||||
plex_log_table_options.ajax = {
|
||||
url: "get_plex_log?log_type=scanner"
|
||||
}
|
||||
plex_log_table_options.initComplete = bindLogLevelFilter;
|
||||
plex_scanner_log_table = $('#plex_scanner_log_table').DataTable(plex_log_table_options);
|
||||
}
|
||||
|
||||
@@ -190,17 +257,25 @@
|
||||
}
|
||||
|
||||
$("#plexpy-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").show();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").show();
|
||||
$("#download-plexpylog").show()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexPyLogs();
|
||||
loadPlexPyLogs(selected_log_level);
|
||||
clearSearchButton('log_table', log_table);
|
||||
});
|
||||
|
||||
$("#plex-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").show()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexLogs();
|
||||
@@ -208,8 +283,12 @@
|
||||
});
|
||||
|
||||
$("#plex-scanner-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").show();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").show()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").hide();
|
||||
loadPlexScannerLogs();
|
||||
@@ -217,8 +296,12 @@
|
||||
});
|
||||
|
||||
$("#notification-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").show();
|
||||
$("#clear-login-logs").hide();
|
||||
loadNotificationLogs();
|
||||
@@ -226,8 +309,12 @@
|
||||
});
|
||||
|
||||
$("#login-logs-btn").click(function () {
|
||||
$("#plexpy-log-levels").hide();
|
||||
$("#plex-log-levels").hide();
|
||||
$("#clear-logs").hide();
|
||||
$("#download-plexpylog").hide()
|
||||
$("#download-plexserverlog").hide()
|
||||
$("#download-plexscannerlog").hide()
|
||||
$("#clear-notify-logs").hide();
|
||||
$("#clear-login-logs").show();
|
||||
loadLoginLogs();
|
||||
@@ -263,6 +350,13 @@
|
||||
window.location.href = "download_log";
|
||||
});
|
||||
|
||||
$("#download-plexserverlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=server";
|
||||
});
|
||||
|
||||
$("#download-plexscannerlog").click(function () {
|
||||
window.location.href = "download_plex_log?log_type=scanner";
|
||||
});
|
||||
|
||||
$("#clear-notify-logs").click(function () {
|
||||
$("#confirm-message").text("Are you sure you want to clear the PlexPy notification logs?");
|
||||
|
@@ -148,7 +148,7 @@
|
||||
|
||||
$('#save-notification-item').click(function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, true, reloadModal);
|
||||
return false;
|
||||
});
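(The extra true inserted before the callback matches the new showMsg parameter added to doAjaxCall earlier in this change set, i.e. the call shape is now:
    // doAjaxCall(url, elem, reload, form, showMsg, callback)
    doAjaxCall('set_notification_config', $(this), 'tabs', true, /* showMsg */ true, reloadModal);
)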
@@ -176,7 +176,7 @@
|
||||
})
|
||||
|
||||
$('#test_notifier').click(function () {
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, sendTestNotification);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, false, sendTestNotification);
|
||||
});
|
||||
|
||||
function sendTestNotification() {
|
||||
@@ -217,9 +217,9 @@
|
||||
}
|
||||
}
|
||||
|
||||
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder').on('change', function () {
|
||||
$('#pushbullet_apikey, #pushover_apitoken, #scripts_folder, #join_apikey').on('change', function () {
|
||||
// Reload modal to update certain fields
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, reloadModal);
|
||||
doAjaxCall('set_notification_config', $(this), 'tabs', true, false, reloadModal);
|
||||
return false;
|
||||
});
|
||||
|
||||
|
@@ -57,6 +57,20 @@
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a media item triggers the defined buffer threshold.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_concurrent" ${helpers.checked(data['on_concurrent'])} class="toggle-switches">
|
||||
Notify on user concurrent streams
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user has concurrent streams.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_newdevice" ${helpers.checked(data['on_newdevice'])} class="toggle-switches">
|
||||
Notify on user new device
|
||||
</label>
|
||||
<p class="help-block">Trigger notification when a user streams from a new device.</p>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-size="small" data-id="${data['id']}" data-config-name="${data['config_prefix']}_on_created" ${helpers.checked(data['on_created'])} class="toggle-switches">
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -69,6 +72,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -91,6 +97,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
@@ -109,6 +118,5 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server. Please check your <a href="settings">settings</a>.
|
||||
</div><br>
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.</div><br>
|
||||
% endif
|
@@ -47,7 +47,7 @@
|
||||
|
||||
// Redirect to home page after countdown.
|
||||
function reloadPage() {
|
||||
window.location.href = "index";
|
||||
window.location.href = "${new_http_root}index";
|
||||
}
|
||||
</script>
|
||||
</%def>
|
||||
|
@@ -53,13 +53,15 @@ DOCUMENTATION :: END
|
||||
<div class="modal-body">
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Stream Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
<li>Container: <strong>${data['transcode_container'] if data['transcode_container'] else data['container']}</strong></li>
|
||||
% if data['media_type'] != 'track':
|
||||
<li>Resolution: <strong>${data['video_resolution'] + 'p' if data['video_resolution'] != 'sd' else data['video_resolution']}</strong></li>
|
||||
<li>Resolution: <strong>${data['transcode_height'] if data['transcode_height'] else data['height']}p</strong></li>
|
||||
% endif
|
||||
</ul>
|
||||
</div>
|
||||
@@ -95,13 +97,15 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
<div class="col-sm-12">
|
||||
<h4><strong>Source Details</strong></h4>
|
||||
</div>
|
||||
<div class="col-sm-4">
|
||||
<h5>Media</h5>
|
||||
<ul class="list-unstyled">
|
||||
<li>Container: <strong>${data['container']}</strong></li>
|
||||
% if data['media_type'] != 'track':
|
||||
<li>Resolution: <strong>${data['height']}p</strong></li>
|
||||
<li>Resolution: <strong>${data['video_resolution'] + 'p' if data['video_resolution'] != 'sd' else data['video_resolution']}</strong></li>
|
||||
% endif
|
||||
<li>Bitrate: <strong>${data['bitrate']} kbps</strong></li>
|
||||
</ul>
|
||||
|
@@ -383,7 +383,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate watch time stats
|
||||
$.ajax({
|
||||
url: 'get_user_watch_time_stats',
|
||||
url: 'user_watch_time_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
@@ -393,7 +393,7 @@ DOCUMENTATION :: END
|
||||
|
||||
// Populate platform stats
|
||||
$.ajax({
|
||||
url: 'get_user_player_stats',
|
||||
url: 'user_player_stats',
|
||||
async: true,
|
||||
data: { user_id: user_id, user: username },
|
||||
complete: function(xhr, status) {
|
||||
|
@@ -49,6 +49,9 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
% if _session['user_group'] == 'admin':
|
||||
<span class="overlay-refresh-image" title="Refresh image"><i class="fa fa-refresh refresh_pms_image"></i></span>
|
||||
% endif
|
||||
</div>
|
||||
</a>
|
||||
<div class="dashboard-recent-media-metacontainer">
|
||||
|
@@ -418,7 +418,7 @@
|
||||
if ((pms_username !== '') && (pms_password !== '')) {
|
||||
$.ajax({
|
||||
type: 'GET',
|
||||
url: 'get_pms_token',
|
||||
url: 'get_plexpy_pms_token',
|
||||
data: {
|
||||
username: pms_username,
|
||||
password: pms_password
|
||||
@@ -426,15 +426,17 @@
|
||||
cache: false,
|
||||
async: true,
|
||||
complete: function (xhr, status) {
|
||||
var authToken = $.parseJSON(xhr.responseText);
|
||||
if (authToken) {
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> Authentation successful!');
|
||||
var result = $.parseJSON(xhr.responseText);
|
||||
var msg = result.message;
|
||||
if (result.result == 'success') {
|
||||
var authToken = result.token;
|
||||
$("#pms-token-status").html('<i class="fa fa-check"></i> ' + msg);
|
||||
$('#pms-token-status').fadeIn('fast');
|
||||
$("#pms_token").val(authToken);
|
||||
authenticated = true;
|
||||
getServerOptions(authToken)
|
||||
} else {
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> Invalid username or password.');
|
||||
$("#pms-token-status").html('<i class="fa fa-exclamation-circle"></i> ' + msg);
|
||||
}
|
||||
}
|
||||
});
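(The success branch above reads result.result, result.message and result.token, so get_plexpy_pms_token is expected to return JSON roughly of this shape; the values are placeholders, not actual server output:
    // { "result": "success", "message": "Authentication successful.", "token": "xxxxxxxxxxxxxxxxxxxx" }
)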
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# PROVIDE: plexpy
|
||||
# REQUIRE: sabnzbd
|
||||
# REQUIRE: plexpy
|
||||
# KEYWORD: shutdown
|
||||
#
|
||||
# Add the following lines to /etc/rc.conf.local or /etc/rc.conf
|
||||
@@ -10,7 +10,7 @@
|
||||
# plexpy_enable (bool): Set to NO by default.
|
||||
# Set it to YES to enable it.
|
||||
# plexpy_user: The user account PlexPy daemon runs as what
|
||||
# you want it to be. It uses '_sabnzbd' user by
|
||||
# you want it to be. It uses 'plexpy' user by
|
||||
# default. Do not sets it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
@@ -28,7 +28,7 @@ rcvar=${name}_enable
|
||||
load_rc_config ${name}
|
||||
|
||||
: ${plexpy_enable:="NO"}
|
||||
: ${plexpy_user:="_sabnzbd"}
|
||||
: ${plexpy_user:="plexpy"}
|
||||
: ${plexpy_dir:="/usr/local/plexpy"}
|
||||
: ${plexpy_chdir:="${plexpy_dir}"}
|
||||
: ${plexpy_pid:="${plexpy_dir}/plexpy.pid"}
|
||||
|
@@ -1,66 +0,0 @@
|
||||
# PlexPy - Stats for Plex Media Server usage
|
||||
#
|
||||
# Service Unit file for systemd system manager
|
||||
#
|
||||
# INSTALLATION NOTES
|
||||
#
|
||||
# 1. Rename this file as you want, ensuring that it ends in .service
|
||||
# e.g. 'plexpy.service'
|
||||
#
|
||||
# 2. Adjust configuration settings as required. More details in the
|
||||
# "CONFIGURATION NOTES" section shown below.
|
||||
#
|
||||
# 3. Copy this file into your systemd service unit directory, which is
|
||||
# often '/lib/systemd/system'.
|
||||
#
|
||||
# 4. Create any files/directories that you specified back in step #2.
|
||||
# e.g. '/etc/plexpy/plexpy.ini'
|
||||
# '/home/sabnzbd/.plexpy'
|
||||
#
|
||||
# 5. Enable boot-time autostart with the following commands:
|
||||
# systemctl daemon-reload
|
||||
# systemctl enable plexpy.service
|
||||
#
|
||||
# 6. Start now with the following command:
|
||||
# systemctl start plexpy.service
|
||||
#
|
||||
# 7. If troubleshooting startup-errors, start by checking permissions
|
||||
# and ownership on the files/directories that you created in step #4.
|
||||
#
|
||||
#
|
||||
# CONFIGURATION NOTES
|
||||
#
|
||||
# - The example settings in this file assume that:
|
||||
# 1. You will run PlexPy as user/group: sabnzbd.sabnzbd
|
||||
# 2. You will either have PlexPy installed as a subdirectory
|
||||
# under '~sabnzbd', or that you will have a symlink under
|
||||
# '~/sabnzbd' pointing to your PlexPy install dir.
|
||||
# 3. Your PlexPy data directory and configuration file will be
|
||||
# in separate locations from your PlexPy install dir, to
|
||||
# simplify updates.
|
||||
#
|
||||
# - Option names (e.g. ExecStart=, Type=) appear to be case-sensitive)
|
||||
#
|
||||
# - Adjust ExecStart= to point to:
|
||||
# 1. Your PlexPy executable,
|
||||
# 2. Your config file (recommended is to put it somewhere in /etc)
|
||||
# 3. Your datadir (recommended is to NOT put it in your PlexPy exec dir)
|
||||
#
|
||||
# - Adjust User= and Group= to the user/group you want PlexPy to run as.
|
||||
#
|
||||
# - WantedBy= specifies which target (i.e. runlevel) to start PlexPy for.
|
||||
# multi-user.target equates to runlevel 3 (multi-user text mode)
|
||||
# graphical.target equates to runlevel 5 (multi-user X11 graphical mode)
|
||||
|
||||
[Unit]
|
||||
Description=PlexPy - Stats for Plex Media Server usage
|
||||
|
||||
[Service]
|
||||
ExecStart=/home/sabnzbd/plexpy/PlexPy.py --daemon --config /etc/plexpy/plexpy.ini --datadir /home/sabnzbd/.plexpy --nolaunch --quiet
|
||||
GuessMainPID=no
|
||||
Type=forking
|
||||
User=sabnzbd
|
||||
Group=sabnzbd
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# PROVIDE: plexpy
|
||||
# REQUIRE: DAEMON sabnzbd
|
||||
# REQUIRE: DAEMON plexpy
|
||||
# KEYWORD: shutdown
|
||||
#
|
||||
# Add the following lines to /etc/rc.conf.local or /etc/rc.conf
|
||||
@@ -10,11 +10,11 @@
|
||||
# plexpy_enable (bool): Set to NO by default.
|
||||
# Set it to YES to enable it.
|
||||
# plexpy_user: The user account PlexPy daemon runs as what
|
||||
# you want it to be. It uses '_sabnzbd' user by
|
||||
# you want it to be. It uses 'plexpy' user by
|
||||
# default. Do not sets it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
@@ -29,8 +29,8 @@ rcvar=${name}_enable
|
||||
load_rc_config ${name}
|
||||
|
||||
: ${plexpy_enable:="NO"}
|
||||
: ${plexpy_user:="_sabnzbd"}
|
||||
: ${plexpy_dir:="/usr/local/plexpy"}
|
||||
: ${plexpy_user:="plexpy"}
|
||||
: ${plexpy_dir:="/usr/local/share/plexpy"}
|
||||
: ${plexpy_chdir:="${plexpy_dir}"}
|
||||
: ${plexpy_pid:="${plexpy_dir}/plexpy.pid"}
|
||||
: ${plexpy_flags:=""}
|
||||
@@ -49,23 +49,33 @@ fi
|
||||
|
||||
verify_plexpy_pid() {
|
||||
# Make sure the pid corresponds to the PlexPy process.
|
||||
pid=`cat ${plexpy_pid} 2>/dev/null`
|
||||
ps -p ${pid} | grep -q "python ${plexpy_dir}/PlexPy.py"
|
||||
return $?
|
||||
if [ -f ${plexpy_pid} ]; then
|
||||
pid=`cat ${plexpy_pid} 2>/dev/null`
|
||||
ps -p ${pid} | grep -q "python2 ${plexpy_dir}/PlexPy.py"
|
||||
return $?
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Try to stop PlexPy cleanly by calling shutdown over http.
|
||||
# Try to stop PlexPy cleanly by sending SIGTERM
|
||||
plexpy_stop() {
|
||||
echo "Stopping $name"
|
||||
verify_plexpy_pid
|
||||
if [ -n "${pid}" ]; then
|
||||
kill ${pid}
|
||||
wait_for_pids ${pid}
|
||||
echo "Stopped"
|
||||
echo "Stopped."
|
||||
fi
|
||||
}
|
||||
|
||||
plexpy_status() {
|
||||
verify_plexpy_pid && echo "$name is running as ${pid}" || echo "$name is not running"
|
||||
verify_plexpy_pid
|
||||
if [ -n "${pid}" ]; then
|
||||
echo "$name is running as ${pid}."
|
||||
else
|
||||
echo "$name is not running."
|
||||
fi
|
||||
}
|
||||
|
||||
run_rc_command "$1"
|
||||
|
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# PROVIDE: plexpy
|
||||
# REQUIRE: DAEMON sabnzbd
|
||||
# REQUIRE: DAEMON plexpy
|
||||
# KEYWORD: shutdown
|
||||
#
|
||||
# Add the following lines to /etc/rc.conf.local or /etc/rc.conf
|
||||
@@ -10,11 +10,11 @@
|
||||
# plexpy_enable (bool): Set to NO by default.
|
||||
# Set it to YES to enable it.
|
||||
# plexpy_user: The user account PlexPy daemon runs as what
|
||||
# you want it to be. It uses '_sabnzbd' user by
|
||||
# you want it to be. It uses 'plexpy' user by
|
||||
# default. Do not sets it as empty or it will run
|
||||
# as root.
|
||||
# plexpy_dir: Directory where PlexPy lives.
|
||||
# Default: /usr/local/plexpy
|
||||
# Default: /usr/local/share/plexpy
|
||||
# plexpy_chdir: Change to this directory before running PlexPy.
|
||||
# Default is same as plexpy_dir.
|
||||
# plexpy_pid: The name of the pidfile to create.
|
||||
@@ -29,7 +29,7 @@ rcvar=${name}_enable
|
||||
load_rc_config ${name}
|
||||
|
||||
: ${plexpy_enable:="NO"}
|
||||
: ${plexpy_user:="_sabnzbd"}
|
||||
: ${plexpy_user:="plexpy"}
|
||||
: ${plexpy_dir:="/usr/local/share/plexpy"}
|
||||
: ${plexpy_chdir:="${plexpy_dir}"}
|
||||
: ${plexpy_pid:="${plexpy_dir}/plexpy.pid"}
|
||||
@@ -58,7 +58,7 @@ verify_plexpy_pid() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Try to stop PlexPy cleanly by calling shutdown over http.
|
||||
# Try to stop PlexPy cleanly by sending SIGTERM
|
||||
plexpy_stop() {
|
||||
echo "Stopping $name."
|
||||
verify_plexpy_pid
|
||||
|
@@ -1,67 +0,0 @@
|
||||
# PlexPy - Stats for Plex Media Server usage
|
||||
#
|
||||
# Service Unit file for systemd system manager
|
||||
#
|
||||
# INSTALLATION NOTES
|
||||
#
|
||||
# 1. Rename this file as you want, ensuring that it ends in .service
|
||||
# e.g. 'plexpy.service'
|
||||
#
|
||||
# 2. Adjust configuration settings as required. More details in the
|
||||
# "CONFIGURATION NOTES" section shown below.
|
||||
#
|
||||
# 3. Copy this file into your systemd service unit directory, which is
|
||||
# often '/lib/systemd/system'.
|
||||
#
|
||||
# 4. Create any files/directories that you specified back in step #2.
|
||||
# e.g. '/opt/plexpy.ini'
|
||||
# '/opt/plexpy'
|
||||
#
|
||||
# 5. Enable boot-time autostart with the following commands:
|
||||
# systemctl daemon-reload
|
||||
# systemctl enable plexpy.service
|
||||
#
|
||||
# 6. Start now with the following command:
|
||||
# systemctl start plexpy.service
|
||||
#
|
||||
# 7. If troubleshooting startup-errors, start by checking permissions
|
||||
# and ownership on the files/directories that you created in step #4.
|
||||
#
|
||||
#
|
||||
# CONFIGURATION NOTES
|
||||
#
|
||||
# - The example settings in this file assume that:
|
||||
# 1. You will run PlexPy as user/group: plex.users
|
||||
# 2. You will either have PlexPy installed as a subdirectory
|
||||
# under '/opt', or that you will have a symlink under
|
||||
# '/opt' pointing to your PlexPy install dir.
|
||||
# 3. Your PlexPy data directory and configuration file can be
|
||||
# in separate locations from your PlexPy install dir, to
|
||||
# simplify updates. However, in the example below they are in the
|
||||
# PlexPy install dir.
|
||||
#
|
||||
# - Option names (e.g. ExecStart=, Type=) appear to be case-sensitive)
|
||||
#
|
||||
# - Adjust ExecStart= to point to:
|
||||
# 1. Your PlexPy executable,
|
||||
# 2. Your config file (recommended is to put it somewhere in /etc)
|
||||
# 3. Your datadir (recommended is to NOT put it in your PlexPy exec dir)
|
||||
#
|
||||
# - Adjust User= and Group= to the user/group you want PlexPy to run as.
|
||||
#
|
||||
# - WantedBy= specifies which target (i.e. runlevel) to start PlexPy for.
|
||||
# multi-user.target equates to runlevel 3 (multi-user text mode)
|
||||
# graphical.target equates to runlevel 5 (multi-user X11 graphical mode)
|
||||
|
||||
[Unit]
|
||||
Description=PlexPy - Stats for Plex Media Server usage
|
||||
|
||||
[Service]
|
||||
ExecStart=/opt/plexpy/PlexPy.py --daemon --config /opt/plexpy/config.ini --datadir /opt/plexpy --nolaunch --quiet
|
||||
GuessMainPID=no
|
||||
Type=forking
|
||||
User=plex
|
||||
Group=users
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
@@ -19,7 +19,7 @@
|
||||
</dependency>
|
||||
|
||||
<method_context>
|
||||
<method_credential user="sabnzbd" group="sabnzbd"/>
|
||||
<method_credential user="plexpy" group="nogroup"/>
|
||||
</method_context>
|
||||
|
||||
<exec_method type="method" name="start" exec="python /opt/plexpy/PlexPy.py --daemon --quiet --nolaunch" timeout_seconds="60"/>
|
||||
|
@@ -13,32 +13,20 @@
|
||||
# 3. Copy this file into your systemd service unit directory, which is
|
||||
# often '/lib/systemd/system'.
|
||||
#
|
||||
# 4. Create any files/directories that you specified back in step #2.
|
||||
# e.g. '/etc/plexpy/plexpy.ini'
|
||||
# '/home/sabnzbd/.plexpy'
|
||||
#
|
||||
# 5. Enable boot-time autostart with the following commands:
|
||||
# 4. Enable boot-time autostart with the following commands:
|
||||
# systemctl daemon-reload
|
||||
# systemctl enable plexpy.service
|
||||
#
|
||||
# 6. Start now with the following command:
|
||||
# 5. Start now with the following command:
|
||||
# systemctl start plexpy.service
|
||||
#
|
||||
# 7. If troubleshooting startup-errors, start by checking permissions
|
||||
# and ownership on the files/directories that you created in step #4.
|
||||
#
|
||||
#
|
||||
# CONFIGURATION NOTES
|
||||
#
|
||||
# - The example settings in this file assume that:
|
||||
# 1. You will run PlexPy as user/group: sabnzbd.sabnzbd
|
||||
# 2. You will either have PlexPy installed as a subdirectory
|
||||
# under '~sabnzbd', or that you will have a symlink under
|
||||
# '~/sabnzbd' pointing to your PlexPy install dir.
|
||||
# 3. Your PlexPy data directory and configuration file will be
|
||||
# in separate locations from your PlexPy install dir, to
|
||||
# simplify updates.
|
||||
#
|
||||
# - The example settings in this file assume that you will run PlexPy as user: plexpy
|
||||
# - To create this user and give it ownership of the plexpy directory:
|
||||
# sudo adduser --system --no-create-home plexpy
|
||||
# sudo chown plexpy:nogroup -R /opt/plexpy
|
||||
#
|
||||
# - Option names (e.g. ExecStart=, Type=) appear to be case-sensitive)
|
||||
#
|
||||
# - Adjust ExecStart= to point to:
|
||||
@@ -63,4 +51,4 @@ User=plexpy
|
||||
Group=nogroup
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
WantedBy=multi-user.target
|
@@ -195,7 +195,7 @@ def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
if not base:
|
||||
base = request.headers.get('Host', '127.0.0.1')
|
||||
port = request.local.port
|
||||
if port != 80:
|
||||
if port != 80 and not base.endswith(':%s' % port):
|
||||
base += ':%s' % port
|
||||
|
||||
if base.find("://") == -1:
|
||||
lib/dns/__init__.py (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""dnspython DNS toolkit"""
|
||||
|
||||
__all__ = [
|
||||
'dnssec',
|
||||
'e164',
|
||||
'edns',
|
||||
'entropy',
|
||||
'exception',
|
||||
'flags',
|
||||
'hash',
|
||||
'inet',
|
||||
'ipv4',
|
||||
'ipv6',
|
||||
'message',
|
||||
'name',
|
||||
'namedict',
|
||||
'node',
|
||||
'opcode',
|
||||
'query',
|
||||
'rcode',
|
||||
'rdata',
|
||||
'rdataclass',
|
||||
'rdataset',
|
||||
'rdatatype',
|
||||
'renderer',
|
||||
'resolver',
|
||||
'reversename',
|
||||
'rrset',
|
||||
'set',
|
||||
'tokenizer',
|
||||
'tsig',
|
||||
'tsigkeyring',
|
||||
'ttl',
|
||||
'rdtypes',
|
||||
'update',
|
||||
'version',
|
||||
'wiredata',
|
||||
'zone',
|
||||
]
|
lib/dns/_compat.py (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
import sys
|
||||
|
||||
|
||||
if sys.version_info > (3,):
|
||||
long = int
|
||||
xrange = range
|
||||
else:
|
||||
long = long
|
||||
xrange = xrange
|
||||
|
||||
# unicode / binary types
|
||||
if sys.version_info > (3,):
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
string_types = (str,)
|
||||
unichr = chr
|
||||
else:
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
string_types = (basestring,)
|
||||
unichr = unichr
|
lib/dns/dnssec.py (new file, 457 lines)
@@ -0,0 +1,457 @@
|
||||
# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Common DNSSEC-related functions and constants."""
|
||||
|
||||
from io import BytesIO
|
||||
import struct
|
||||
import time
|
||||
|
||||
import dns.exception
|
||||
import dns.hash
|
||||
import dns.name
|
||||
import dns.node
|
||||
import dns.rdataset
|
||||
import dns.rdata
|
||||
import dns.rdatatype
|
||||
import dns.rdataclass
|
||||
from ._compat import string_types
|
||||
|
||||
|
||||
class UnsupportedAlgorithm(dns.exception.DNSException):
|
||||
|
||||
"""The DNSSEC algorithm is not supported."""
|
||||
|
||||
|
||||
class ValidationFailure(dns.exception.DNSException):
|
||||
|
||||
"""The DNSSEC signature is invalid."""
|
||||
|
||||
RSAMD5 = 1
|
||||
DH = 2
|
||||
DSA = 3
|
||||
ECC = 4
|
||||
RSASHA1 = 5
|
||||
DSANSEC3SHA1 = 6
|
||||
RSASHA1NSEC3SHA1 = 7
|
||||
RSASHA256 = 8
|
||||
RSASHA512 = 10
|
||||
ECDSAP256SHA256 = 13
|
||||
ECDSAP384SHA384 = 14
|
||||
INDIRECT = 252
|
||||
PRIVATEDNS = 253
|
||||
PRIVATEOID = 254
|
||||
|
||||
_algorithm_by_text = {
|
||||
'RSAMD5': RSAMD5,
|
||||
'DH': DH,
|
||||
'DSA': DSA,
|
||||
'ECC': ECC,
|
||||
'RSASHA1': RSASHA1,
|
||||
'DSANSEC3SHA1': DSANSEC3SHA1,
|
||||
'RSASHA1NSEC3SHA1': RSASHA1NSEC3SHA1,
|
||||
'RSASHA256': RSASHA256,
|
||||
'RSASHA512': RSASHA512,
|
||||
'INDIRECT': INDIRECT,
|
||||
'ECDSAP256SHA256': ECDSAP256SHA256,
|
||||
'ECDSAP384SHA384': ECDSAP384SHA384,
|
||||
'PRIVATEDNS': PRIVATEDNS,
|
||||
'PRIVATEOID': PRIVATEOID,
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be true inverse.
|
||||
|
||||
_algorithm_by_value = dict((y, x) for x, y in _algorithm_by_text.items())
|
||||
|
||||
|
||||
def algorithm_from_text(text):
|
||||
"""Convert text into a DNSSEC algorithm value
|
||||
@rtype: int"""
|
||||
|
||||
value = _algorithm_by_text.get(text.upper())
|
||||
if value is None:
|
||||
value = int(text)
|
||||
return value
|
||||
|
||||
|
||||
def algorithm_to_text(value):
|
||||
"""Convert a DNSSEC algorithm value to text
|
||||
@rtype: string"""
|
||||
|
||||
text = _algorithm_by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
|
||||
|
||||
|
||||
def _to_rdata(record, origin):
|
||||
s = BytesIO()
|
||||
record.to_wire(s, origin=origin)
|
||||
return s.getvalue()
|
||||
|
||||
|
||||
def key_id(key, origin=None):
|
||||
rdata = _to_rdata(key, origin)
|
||||
rdata = bytearray(rdata)
|
||||
if key.algorithm == RSAMD5:
|
||||
return (rdata[-3] << 8) + rdata[-2]
|
||||
else:
|
||||
total = 0
|
||||
for i in range(len(rdata) // 2):
|
||||
total += (rdata[2 * i] << 8) + \
|
||||
rdata[2 * i + 1]
|
||||
if len(rdata) % 2 != 0:
|
||||
total += rdata[len(rdata) - 1] << 8
|
||||
total += ((total >> 16) & 0xffff)
|
||||
return total & 0xffff
|
||||
|
||||
|
||||
def make_ds(name, key, algorithm, origin=None):
|
||||
if algorithm.upper() == 'SHA1':
|
||||
dsalg = 1
|
||||
hash = dns.hash.hashes['SHA1']()
|
||||
elif algorithm.upper() == 'SHA256':
|
||||
dsalg = 2
|
||||
hash = dns.hash.hashes['SHA256']()
|
||||
else:
|
||||
raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm)
|
||||
|
||||
if isinstance(name, string_types):
|
||||
name = dns.name.from_text(name, origin)
|
||||
hash.update(name.canonicalize().to_wire())
|
||||
hash.update(_to_rdata(key, origin))
|
||||
digest = hash.digest()
|
||||
|
||||
dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest
|
||||
return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0,
|
||||
len(dsrdata))
|
||||
|
||||
|
||||
def _find_candidate_keys(keys, rrsig):
|
||||
candidate_keys = []
|
||||
value = keys.get(rrsig.signer)
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, dns.node.Node):
|
||||
try:
|
||||
rdataset = value.find_rdataset(dns.rdataclass.IN,
|
||||
dns.rdatatype.DNSKEY)
|
||||
except KeyError:
|
||||
return None
|
||||
else:
|
||||
rdataset = value
|
||||
for rdata in rdataset:
|
||||
if rdata.algorithm == rrsig.algorithm and \
|
||||
key_id(rdata) == rrsig.key_tag:
|
||||
candidate_keys.append(rdata)
|
||||
return candidate_keys
|
||||
|
||||
|
||||
def _is_rsa(algorithm):
|
||||
return algorithm in (RSAMD5, RSASHA1,
|
||||
RSASHA1NSEC3SHA1, RSASHA256,
|
||||
RSASHA512)
|
||||
|
||||
|
||||
def _is_dsa(algorithm):
|
||||
return algorithm in (DSA, DSANSEC3SHA1)
|
||||
|
||||
|
||||
def _is_ecdsa(algorithm):
|
||||
return _have_ecdsa and (algorithm in (ECDSAP256SHA256, ECDSAP384SHA384))
|
||||
|
||||
|
||||
def _is_md5(algorithm):
|
||||
return algorithm == RSAMD5
|
||||
|
||||
|
||||
def _is_sha1(algorithm):
|
||||
return algorithm in (DSA, RSASHA1,
|
||||
DSANSEC3SHA1, RSASHA1NSEC3SHA1)
|
||||
|
||||
|
||||
def _is_sha256(algorithm):
|
||||
return algorithm in (RSASHA256, ECDSAP256SHA256)
|
||||
|
||||
|
||||
def _is_sha384(algorithm):
|
||||
return algorithm == ECDSAP384SHA384
|
||||
|
||||
|
||||
def _is_sha512(algorithm):
|
||||
return algorithm == RSASHA512
|
||||
|
||||
|
||||
def _make_hash(algorithm):
|
||||
if _is_md5(algorithm):
|
||||
return dns.hash.hashes['MD5']()
|
||||
if _is_sha1(algorithm):
|
||||
return dns.hash.hashes['SHA1']()
|
||||
if _is_sha256(algorithm):
|
||||
return dns.hash.hashes['SHA256']()
|
||||
if _is_sha384(algorithm):
|
||||
return dns.hash.hashes['SHA384']()
|
||||
if _is_sha512(algorithm):
|
||||
return dns.hash.hashes['SHA512']()
|
||||
raise ValidationFailure('unknown hash for algorithm %u' % algorithm)
|
||||
|
||||
|
||||
def _make_algorithm_id(algorithm):
|
||||
if _is_md5(algorithm):
|
||||
oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05]
|
||||
elif _is_sha1(algorithm):
|
||||
oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a]
|
||||
elif _is_sha256(algorithm):
|
||||
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01]
|
||||
elif _is_sha512(algorithm):
|
||||
oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03]
|
||||
else:
|
||||
raise ValidationFailure('unknown algorithm %u' % algorithm)
|
||||
olen = len(oid)
|
||||
dlen = _make_hash(algorithm).digest_size
|
||||
idbytes = [0x30] + [8 + olen + dlen] + \
|
||||
[0x30, olen + 4] + [0x06, olen] + oid + \
|
||||
[0x05, 0x00] + [0x04, dlen]
|
||||
return struct.pack('!%dB' % len(idbytes), *idbytes)
|
||||
|
||||
|
||||
def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
|
||||
"""Validate an RRset against a single signature rdata
|
||||
|
||||
The owner name of the rrsig is assumed to be the same as the owner name
|
||||
of the rrset.
|
||||
|
||||
@param rrset: The RRset to validate
|
||||
@type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param rrsig: The signature rdata
|
||||
@type rrsig: dns.rrset.Rdata
|
||||
@param keys: The key dictionary.
|
||||
@type keys: a dictionary keyed by dns.name.Name with node or rdataset
|
||||
values
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name or None
|
||||
@param now: The time to use when validating the signatures. The default
|
||||
is the current time.
|
||||
@type now: int
|
||||
"""
|
||||
|
||||
if isinstance(origin, string_types):
|
||||
origin = dns.name.from_text(origin, dns.name.root)
|
||||
|
||||
for candidate_key in _find_candidate_keys(keys, rrsig):
|
||||
if not candidate_key:
|
||||
raise ValidationFailure('unknown key')
|
||||
|
||||
# For convenience, allow the rrset to be specified as a (name,
|
||||
# rdataset) tuple as well as a proper rrset
|
||||
if isinstance(rrset, tuple):
|
||||
rrname = rrset[0]
|
||||
rdataset = rrset[1]
|
||||
else:
|
||||
rrname = rrset.name
|
||||
rdataset = rrset
|
||||
|
||||
if now is None:
|
||||
now = time.time()
|
||||
if rrsig.expiration < now:
|
||||
raise ValidationFailure('expired')
|
||||
if rrsig.inception > now:
|
||||
raise ValidationFailure('not yet valid')
|
||||
|
||||
hash = _make_hash(rrsig.algorithm)
|
||||
|
||||
if _is_rsa(rrsig.algorithm):
|
||||
keyptr = candidate_key.key
|
||||
(bytes_,) = struct.unpack('!B', keyptr[0:1])
|
||||
keyptr = keyptr[1:]
|
||||
if bytes_ == 0:
|
||||
(bytes_,) = struct.unpack('!H', keyptr[0:2])
|
||||
keyptr = keyptr[2:]
|
||||
rsa_e = keyptr[0:bytes_]
|
||||
rsa_n = keyptr[bytes_:]
|
||||
keylen = len(rsa_n) * 8
|
||||
pubkey = Crypto.PublicKey.RSA.construct(
|
||||
(Crypto.Util.number.bytes_to_long(rsa_n),
|
||||
Crypto.Util.number.bytes_to_long(rsa_e)))
|
||||
sig = (Crypto.Util.number.bytes_to_long(rrsig.signature),)
|
||||
elif _is_dsa(rrsig.algorithm):
|
||||
keyptr = candidate_key.key
|
||||
(t,) = struct.unpack('!B', keyptr[0:1])
|
||||
keyptr = keyptr[1:]
|
||||
octets = 64 + t * 8
|
||||
dsa_q = keyptr[0:20]
|
||||
keyptr = keyptr[20:]
|
||||
dsa_p = keyptr[0:octets]
|
||||
keyptr = keyptr[octets:]
|
||||
dsa_g = keyptr[0:octets]
|
||||
keyptr = keyptr[octets:]
|
||||
dsa_y = keyptr[0:octets]
|
||||
pubkey = Crypto.PublicKey.DSA.construct(
|
||||
(Crypto.Util.number.bytes_to_long(dsa_y),
|
||||
Crypto.Util.number.bytes_to_long(dsa_g),
|
||||
Crypto.Util.number.bytes_to_long(dsa_p),
|
||||
Crypto.Util.number.bytes_to_long(dsa_q)))
|
||||
(dsa_r, dsa_s) = struct.unpack('!20s20s', rrsig.signature[1:])
|
||||
sig = (Crypto.Util.number.bytes_to_long(dsa_r),
|
||||
Crypto.Util.number.bytes_to_long(dsa_s))
|
||||
elif _is_ecdsa(rrsig.algorithm):
|
||||
if rrsig.algorithm == ECDSAP256SHA256:
|
||||
curve = ecdsa.curves.NIST256p
|
||||
key_len = 32
|
||||
elif rrsig.algorithm == ECDSAP384SHA384:
|
||||
curve = ecdsa.curves.NIST384p
|
||||
key_len = 48
|
||||
else:
|
||||
# shouldn't happen
|
||||
raise ValidationFailure('unknown ECDSA curve')
|
||||
keyptr = candidate_key.key
|
||||
x = Crypto.Util.number.bytes_to_long(keyptr[0:key_len])
|
||||
y = Crypto.Util.number.bytes_to_long(keyptr[key_len:key_len * 2])
|
||||
assert ecdsa.ecdsa.point_is_valid(curve.generator, x, y)
|
||||
point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order)
|
||||
verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point,
|
||||
curve)
|
||||
pubkey = ECKeyWrapper(verifying_key, key_len)
|
||||
r = rrsig.signature[:key_len]
|
||||
s = rrsig.signature[key_len:]
|
||||
sig = ecdsa.ecdsa.Signature(Crypto.Util.number.bytes_to_long(r),
|
||||
Crypto.Util.number.bytes_to_long(s))
|
||||
else:
|
||||
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
|
||||
|
||||
hash.update(_to_rdata(rrsig, origin)[:18])
|
||||
hash.update(rrsig.signer.to_digestable(origin))
|
||||
|
||||
if rrsig.labels < len(rrname) - 1:
|
||||
suffix = rrname.split(rrsig.labels + 1)[1]
|
||||
rrname = dns.name.from_text('*', suffix)
|
||||
rrnamebuf = rrname.to_digestable(origin)
|
||||
rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass,
|
||||
rrsig.original_ttl)
|
||||
rrlist = sorted(rdataset)
|
||||
for rr in rrlist:
|
||||
hash.update(rrnamebuf)
|
||||
hash.update(rrfixed)
|
||||
rrdata = rr.to_digestable(origin)
|
||||
rrlen = struct.pack('!H', len(rrdata))
|
||||
hash.update(rrlen)
|
||||
hash.update(rrdata)
|
||||
|
||||
digest = hash.digest()
|
||||
|
||||
if _is_rsa(rrsig.algorithm):
|
||||
# PKCS1 algorithm identifier goop
|
||||
digest = _make_algorithm_id(rrsig.algorithm) + digest
|
||||
padlen = keylen // 8 - len(digest) - 3
|
||||
digest = struct.pack('!%dB' % (2 + padlen + 1),
|
||||
*([0, 1] + [0xFF] * padlen + [0])) + digest
|
||||
elif _is_dsa(rrsig.algorithm) or _is_ecdsa(rrsig.algorithm):
|
||||
pass
|
||||
else:
|
||||
# Raise here for code clarity; this won't actually ever happen
|
||||
# since if the algorithm is really unknown we'd already have
|
||||
# raised an exception above
|
||||
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
|
||||
|
||||
if pubkey.verify(digest, sig):
|
||||
return
|
||||
raise ValidationFailure('verify failure')
|
||||
|
||||
|
||||
def _validate(rrset, rrsigset, keys, origin=None, now=None):
|
||||
"""Validate an RRset
|
||||
|
||||
@param rrset: The RRset to validate
|
||||
@type rrset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param rrsigset: The signature RRset
|
||||
@type rrsigset: dns.rrset.RRset or (dns.name.Name, dns.rdataset.Rdataset)
|
||||
tuple
|
||||
@param keys: The key dictionary.
|
||||
@type keys: a dictionary keyed by dns.name.Name with node or rdataset
|
||||
values
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name or None
|
||||
@param now: The time to use when validating the signatures. The default
|
||||
is the current time.
|
||||
@type now: int
|
||||
"""
|
||||
|
||||
if isinstance(origin, string_types):
|
||||
origin = dns.name.from_text(origin, dns.name.root)
|
||||
|
||||
if isinstance(rrset, tuple):
|
||||
rrname = rrset[0]
|
||||
else:
|
||||
rrname = rrset.name
|
||||
|
||||
if isinstance(rrsigset, tuple):
|
||||
rrsigname = rrsigset[0]
|
||||
rrsigrdataset = rrsigset[1]
|
||||
else:
|
||||
rrsigname = rrsigset.name
|
||||
rrsigrdataset = rrsigset
|
||||
|
||||
rrname = rrname.choose_relativity(origin)
|
||||
rrsigname = rrsigname.choose_relativity(origin)
|
||||
if rrname != rrsigname:
|
||||
raise ValidationFailure("owner names do not match")
|
||||
|
||||
for rrsig in rrsigrdataset:
|
||||
try:
|
||||
_validate_rrsig(rrset, rrsig, keys, origin, now)
|
||||
return
|
||||
except ValidationFailure:
|
||||
pass
|
||||
raise ValidationFailure("no RRSIGs validated")
|
||||
|
||||
|
||||
def _need_pycrypto(*args, **kwargs):
|
||||
raise NotImplementedError("DNSSEC validation requires pycrypto")
|
||||
|
||||
try:
|
||||
import Crypto.PublicKey.RSA
|
||||
import Crypto.PublicKey.DSA
|
||||
import Crypto.Util.number
|
||||
validate = _validate
|
||||
validate_rrsig = _validate_rrsig
|
||||
_have_pycrypto = True
|
||||
except ImportError:
|
||||
validate = _need_pycrypto
|
||||
validate_rrsig = _need_pycrypto
|
||||
_have_pycrypto = False
|
||||
|
||||
try:
|
||||
import ecdsa
|
||||
import ecdsa.ecdsa
|
||||
import ecdsa.ellipticcurve
|
||||
import ecdsa.keys
|
||||
_have_ecdsa = True
|
||||
|
||||
class ECKeyWrapper(object):
|
||||
|
||||
def __init__(self, key, key_len):
|
||||
self.key = key
|
||||
self.key_len = key_len
|
||||
|
||||
def verify(self, digest, sig):
|
||||
diglong = Crypto.Util.number.bytes_to_long(digest)
|
||||
return self.key.pubkey.verifies(diglong, sig)
|
||||
|
||||
except ImportError:
|
||||
_have_ecdsa = False
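A hedged end-to-end sketch of the validate() entry point wired up above: fetch a zone's DNSKEY RRset plus its RRSIG over TCP and validate the RRset against itself. It assumes network access, a DNSSEC-signed zone (ietf.org is only an example), a resolver reachable at 8.8.8.8, and an installed pycrypto; none of these come from the diff itself.

import dns.name
import dns.message
import dns.query
import dns.rdatatype
import dns.dnssec

zone = dns.name.from_text('ietf.org')
req = dns.message.make_query(zone, dns.rdatatype.DNSKEY, want_dnssec=True)
resp = dns.query.tcp(req, '8.8.8.8', timeout=10)

answer = resp.answer               # expected: the DNSKEY RRset and its RRSIG
if answer[0].rdtype == dns.rdatatype.RRSIG:
    answer.reverse()               # order as [DNSKEY rrset, RRSIG rrset]

# The keys mapping is keyed by the signer name, as _find_candidate_keys expects.
dns.dnssec.validate(answer[0], answer[1], {zone: answer[0]})
print('DNSKEY RRset validated')    # validate() raises ValidationFailure otherwise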
|
lib/dns/e164.py (new file, 84 lines added)
@@ -0,0 +1,84 @@
|
||||
# Copyright (C) 2006, 2007, 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS E.164 helpers
|
||||
|
||||
@var public_enum_domain: The DNS public ENUM domain, e164.arpa.
|
||||
@type public_enum_domain: dns.name.Name object
|
||||
"""
|
||||
|
||||
|
||||
import dns.exception
|
||||
import dns.name
|
||||
import dns.resolver
|
||||
from ._compat import string_types
|
||||
|
||||
public_enum_domain = dns.name.from_text('e164.arpa.')
|
||||
|
||||
|
||||
def from_e164(text, origin=public_enum_domain):
|
||||
"""Convert an E.164 number in textual form into a Name object whose
|
||||
value is the ENUM domain name for that number.
|
||||
@param text: an E.164 number in textual form.
|
||||
@type text: str
|
||||
@param origin: The domain in which the number should be constructed.
|
||||
The default is e164.arpa.
|
||||
@type origin: dns.name.Name object or None
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
parts = [d for d in text if d.isdigit()]
|
||||
parts.reverse()
|
||||
return dns.name.from_text('.'.join(parts), origin=origin)
|
||||
|
||||
|
||||
def to_e164(name, origin=public_enum_domain, want_plus_prefix=True):
|
||||
"""Convert an ENUM domain name into an E.164 number.
|
||||
@param name: the ENUM domain name.
|
||||
@type name: dns.name.Name object.
|
||||
@param origin: A domain containing the ENUM domain name. The
|
||||
name is relativized to this domain before being converted to text.
|
||||
@type origin: dns.name.Name object or None
|
||||
@param want_plus_prefix: if True, add a '+' to the beginning of the
|
||||
returned number.
|
||||
@rtype: str
|
||||
"""
|
||||
if origin is not None:
|
||||
name = name.relativize(origin)
|
||||
dlabels = [d for d in name.labels if (d.isdigit() and len(d) == 1)]
|
||||
if len(dlabels) != len(name.labels):
|
||||
raise dns.exception.SyntaxError('non-digit labels in ENUM domain name')
|
||||
dlabels.reverse()
|
||||
text = b''.join(dlabels)
|
||||
if want_plus_prefix:
|
||||
text = b'+' + text
|
||||
return text
|
||||
|
||||
|
||||
def query(number, domains, resolver=None):
|
||||
"""Look for NAPTR RRs for the specified number in the specified domains.
|
||||
|
||||
e.g. query('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
|
||||
"""
|
||||
if resolver is None:
|
||||
resolver = dns.resolver.get_default_resolver()
|
||||
for domain in domains:
|
||||
if isinstance(domain, string_types):
|
||||
domain = dns.name.from_text(domain)
|
||||
qname = dns.e164.from_e164(number, domain)
|
||||
try:
|
||||
return resolver.query(qname, 'NAPTR')
|
||||
except dns.resolver.NXDOMAIN:
|
||||
pass
|
||||
raise dns.resolver.NXDOMAIN
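A small offline round-trip sketch for the two converters above, assuming the module is importable as dns.e164 (the ENUM lookup in query() needs a resolver, so it is not exercised here):

import dns.e164

n = dns.e164.from_e164('+1 650 555 1212')
print(n)                    # 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
print(dns.e164.to_e164(n))  # b'+16505551212' (bytes in this version)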
|
lib/dns/edns.py (new file, 150 lines added)
@@ -0,0 +1,150 @@
|
||||
# Copyright (C) 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""EDNS Options"""
|
||||
|
||||
NSID = 3
|
||||
|
||||
|
||||
class Option(object):
|
||||
|
||||
"""Base class for all EDNS option types.
|
||||
"""
|
||||
|
||||
def __init__(self, otype):
|
||||
"""Initialize an option.
|
||||
@param otype: The rdata type
|
||||
@type otype: int
|
||||
"""
|
||||
self.otype = otype
|
||||
|
||||
def to_wire(self, file):
|
||||
"""Convert an option to wire format.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, otype, wire, current, olen):
|
||||
"""Build an EDNS option object from wire format
|
||||
|
||||
@param otype: The option type
|
||||
@type otype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param olen: The length of the wire-format option data
|
||||
@type olen: int
|
||||
@rtype: dns.edns.Option instance"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _cmp(self, other):
|
||||
"""Compare an EDNS option with another option of the same type.
|
||||
Return < 0 if self < other, 0 if self == other,
|
||||
and > 0 if self > other.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Option):
|
||||
return False
|
||||
if self.otype != other.otype:
|
||||
return False
|
||||
return self._cmp(other) == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
if not isinstance(other, Option):
|
||||
return False
|
||||
if self.otype != other.otype:
|
||||
return False
|
||||
return self._cmp(other) != 0
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) < 0
|
||||
|
||||
def __le__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) <= 0
|
||||
|
||||
def __ge__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) >= 0
|
||||
|
||||
def __gt__(self, other):
|
||||
if not isinstance(other, Option) or \
|
||||
self.otype != other.otype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) > 0
|
||||
|
||||
|
||||
class GenericOption(Option):
|
||||
|
||||
"""Generate Rdata Class
|
||||
|
||||
This class is used for EDNS option types for which we have no better
|
||||
implementation.
|
||||
"""
|
||||
|
||||
def __init__(self, otype, data):
|
||||
super(GenericOption, self).__init__(otype)
|
||||
self.data = data
|
||||
|
||||
def to_wire(self, file):
|
||||
file.write(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, otype, wire, current, olen):
|
||||
return cls(otype, wire[current: current + olen])
|
||||
|
||||
def _cmp(self, other):
|
||||
if self.data == other.data:
|
||||
return 0
|
||||
if self.data > other.data:
|
||||
return 1
|
||||
return -1
|
||||
|
||||
_type_to_class = {
|
||||
}
|
||||
|
||||
|
||||
def get_option_class(otype):
|
||||
cls = _type_to_class.get(otype)
|
||||
if cls is None:
|
||||
cls = GenericOption
|
||||
return cls
|
||||
|
||||
|
||||
def option_from_wire(otype, wire, current, olen):
|
||||
"""Build an EDNS option object from wire format
|
||||
|
||||
@param otype: The option type
|
||||
@type otype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param olen: The length of the wire-format option data
|
||||
@type olen: int
|
||||
@rtype: dns.edns.Option instance"""
|
||||
|
||||
cls = get_option_class(otype)
|
||||
return cls.from_wire(otype, wire, current, olen)
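A short sketch of a wire round trip through the generic option class above, using the NSID code point defined at the top of the module (assuming the module is importable as dns.edns):

from io import BytesIO
import dns.edns

opt = dns.edns.GenericOption(dns.edns.NSID, b'example-server')
buf = BytesIO()
opt.to_wire(buf)
wire = buf.getvalue()

parsed = dns.edns.option_from_wire(dns.edns.NSID, wire, 0, len(wire))
assert parsed == opt        # GenericOption compares otype and the raw data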
|
lib/dns/entropy.py (new file, 127 lines added)
@@ -0,0 +1,127 @@
|
||||
# Copyright (C) 2009, 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import os
|
||||
import time
|
||||
from ._compat import long, binary_type
|
||||
try:
|
||||
import threading as _threading
|
||||
except ImportError:
|
||||
import dummy_threading as _threading
|
||||
|
||||
|
||||
class EntropyPool(object):
|
||||
|
||||
def __init__(self, seed=None):
|
||||
self.pool_index = 0
|
||||
self.digest = None
|
||||
self.next_byte = 0
|
||||
self.lock = _threading.Lock()
|
||||
try:
|
||||
import hashlib
|
||||
self.hash = hashlib.sha1()
|
||||
self.hash_len = 20
|
||||
except:
|
||||
try:
|
||||
import sha
|
||||
self.hash = sha.new()
|
||||
self.hash_len = 20
|
||||
except:
|
||||
import md5
|
||||
self.hash = md5.new()
|
||||
self.hash_len = 16
|
||||
self.pool = bytearray(b'\0' * self.hash_len)
|
||||
if seed is not None:
|
||||
self.stir(bytearray(seed))
|
||||
self.seeded = True
|
||||
else:
|
||||
self.seeded = False
|
||||
|
||||
def stir(self, entropy, already_locked=False):
|
||||
if not already_locked:
|
||||
self.lock.acquire()
|
||||
try:
|
||||
for c in entropy:
|
||||
if self.pool_index == self.hash_len:
|
||||
self.pool_index = 0
|
||||
b = c & 0xff
|
||||
self.pool[self.pool_index] ^= b
|
||||
self.pool_index += 1
|
||||
finally:
|
||||
if not already_locked:
|
||||
self.lock.release()
|
||||
|
||||
def _maybe_seed(self):
|
||||
if not self.seeded:
|
||||
try:
|
||||
seed = os.urandom(16)
|
||||
except:
|
||||
try:
|
||||
r = open('/dev/urandom', 'rb', 0)
|
||||
try:
|
||||
seed = r.read(16)
|
||||
finally:
|
||||
r.close()
|
||||
except:
|
||||
seed = str(time.time())
|
||||
self.seeded = True
|
||||
seed = bytearray(seed)
|
||||
self.stir(seed, True)
|
||||
|
||||
def random_8(self):
|
||||
self.lock.acquire()
|
||||
try:
|
||||
self._maybe_seed()
|
||||
if self.digest is None or self.next_byte == self.hash_len:
|
||||
self.hash.update(binary_type(self.pool))
|
||||
self.digest = bytearray(self.hash.digest())
|
||||
self.stir(self.digest, True)
|
||||
self.next_byte = 0
|
||||
value = self.digest[self.next_byte]
|
||||
self.next_byte += 1
|
||||
finally:
|
||||
self.lock.release()
|
||||
return value
|
||||
|
||||
def random_16(self):
|
||||
return self.random_8() * 256 + self.random_8()
|
||||
|
||||
def random_32(self):
|
||||
return self.random_16() * 65536 + self.random_16()
|
||||
|
||||
def random_between(self, first, last):
|
||||
size = last - first + 1
|
||||
if size > long(4294967296):
|
||||
raise ValueError('too big')
|
||||
if size > 65536:
|
||||
rand = self.random_32
|
||||
max = long(4294967295)
|
||||
elif size > 256:
|
||||
rand = self.random_16
|
||||
max = 65535
|
||||
else:
|
||||
rand = self.random_8
|
||||
max = 255
|
||||
return (first + size * rand() // (max + 1))
|
||||
|
||||
pool = EntropyPool()
|
||||
|
||||
|
||||
def random_16():
|
||||
return pool.random_16()
|
||||
|
||||
|
||||
def between(first, last):
|
||||
return pool.random_between(first, last)
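A usage sketch for the module-level pool helpers above (assuming the module is importable as dns.entropy):

import dns.entropy

qid = dns.entropy.random_16()     # e.g. a 16-bit DNS query id, 0..65535
pick = dns.entropy.between(1, 6)  # value drawn from the inclusive range 1..6
print(qid, pick)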
|
lib/dns/exception.py (new file, 124 lines added)
@@ -0,0 +1,124 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Common DNS Exceptions."""
|
||||
|
||||
|
||||
class DNSException(Exception):
|
||||
|
||||
"""Abstract base class shared by all dnspython exceptions.
|
||||
|
||||
It supports two basic modes of operation:
|
||||
|
||||
a) Old/compatible mode is used if __init__ was called with
|
||||
empty **kwargs.
|
||||
In compatible mode all *args are passed to standard Python Exception class
|
||||
as before and all *args are printed by standard __str__ implementation.
|
||||
Class variable msg (or doc string if msg is None) is returned from str()
|
||||
if *args is empty.
|
||||
|
||||
b) New/parametrized mode is used if __init__ was called with
|
||||
non-empty **kwargs.
|
||||
In the new mode *args has to be empty and all kwargs have to exactly match the
|
||||
set in class variable self.supp_kwargs. All kwargs are stored inside
|
||||
self.kwargs and used in new __str__ implementation to construct
|
||||
formatted message based on self.fmt string.
|
||||
|
||||
In the simplest case it is enough to override supp_kwargs and fmt
|
||||
class variables to get nice parametrized messages.
|
||||
"""
|
||||
msg = None # non-parametrized message
|
||||
supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check)
|
||||
fmt = None # message parametrized with results from _fmt_kwargs
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._check_params(*args, **kwargs)
|
||||
self._check_kwargs(**kwargs)
|
||||
self.kwargs = kwargs
|
||||
if self.msg is None:
|
||||
# doc string is better implicit message than empty string
|
||||
self.msg = self.__doc__
|
||||
if args:
|
||||
super(DNSException, self).__init__(*args)
|
||||
else:
|
||||
super(DNSException, self).__init__(self.msg)
|
||||
|
||||
def _check_params(self, *args, **kwargs):
|
||||
"""Old exceptions supported only args and not kwargs.
|
||||
|
||||
For sanity we do not allow to mix old and new behavior."""
|
||||
if args or kwargs:
|
||||
assert bool(args) != bool(kwargs), \
|
||||
'keyword arguments are mutually exclusive with positional args'
|
||||
|
||||
def _check_kwargs(self, **kwargs):
|
||||
if kwargs:
|
||||
assert set(kwargs.keys()) == self.supp_kwargs, \
|
||||
'following set of keyword args is required: %s' % (
|
||||
self.supp_kwargs)
|
||||
|
||||
def _fmt_kwargs(self, **kwargs):
|
||||
"""Format kwargs before printing them.
|
||||
|
||||
Resulting dictionary has to have keys necessary for str.format call
|
||||
on fmt class variable.
|
||||
"""
|
||||
fmtargs = {}
|
||||
for kw, data in kwargs.items():
|
||||
if isinstance(data, (list, set)):
|
||||
# convert list of <someobj> to list of str(<someobj>)
|
||||
fmtargs[kw] = list(map(str, data))
|
||||
if len(fmtargs[kw]) == 1:
|
||||
# remove list brackets [] from single-item lists
|
||||
fmtargs[kw] = fmtargs[kw].pop()
|
||||
else:
|
||||
fmtargs[kw] = data
|
||||
return fmtargs
|
||||
|
||||
def __str__(self):
|
||||
if self.kwargs and self.fmt:
|
||||
# provide custom message constructed from keyword arguments
|
||||
fmtargs = self._fmt_kwargs(**self.kwargs)
|
||||
return self.fmt.format(**fmtargs)
|
||||
else:
|
||||
# print *args directly in the same way as old DNSException
|
||||
return super(DNSException, self).__str__()
|
||||
|
||||
|
||||
class FormError(DNSException):
|
||||
|
||||
"""DNS message is malformed."""
|
||||
|
||||
|
||||
class SyntaxError(DNSException):
|
||||
|
||||
"""Text input is malformed."""
|
||||
|
||||
|
||||
class UnexpectedEnd(SyntaxError):
|
||||
|
||||
"""Text input ended unexpectedly."""
|
||||
|
||||
|
||||
class TooBig(DNSException):
|
||||
|
||||
"""The DNS message is too big."""
|
||||
|
||||
|
||||
class Timeout(DNSException):
|
||||
|
||||
"""The DNS operation timed out."""
|
||||
supp_kwargs = set(['timeout'])
|
||||
fmt = "The DNS operation timed out after {timeout} seconds"
|
lib/dns/flags.py (new file, 112 lines added)
@@ -0,0 +1,112 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Message Flags."""
|
||||
|
||||
# Standard DNS flags
|
||||
|
||||
QR = 0x8000
|
||||
AA = 0x0400
|
||||
TC = 0x0200
|
||||
RD = 0x0100
|
||||
RA = 0x0080
|
||||
AD = 0x0020
|
||||
CD = 0x0010
|
||||
|
||||
# EDNS flags
|
||||
|
||||
DO = 0x8000
|
||||
|
||||
_by_text = {
|
||||
'QR': QR,
|
||||
'AA': AA,
|
||||
'TC': TC,
|
||||
'RD': RD,
|
||||
'RA': RA,
|
||||
'AD': AD,
|
||||
'CD': CD
|
||||
}
|
||||
|
||||
_edns_by_text = {
|
||||
'DO': DO
|
||||
}
|
||||
|
||||
|
||||
# We construct the inverse mappings programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mappings not to be true inverses.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
_edns_by_value = dict((y, x) for x, y in _edns_by_text.items())
|
||||
|
||||
|
||||
def _order_flags(table):
|
||||
order = list(table.items())
|
||||
order.sort()
|
||||
order.reverse()
|
||||
return order
|
||||
|
||||
_flags_order = _order_flags(_by_value)
|
||||
|
||||
_edns_flags_order = _order_flags(_edns_by_value)
|
||||
|
||||
|
||||
def _from_text(text, table):
|
||||
flags = 0
|
||||
tokens = text.split()
|
||||
for t in tokens:
|
||||
flags = flags | table[t.upper()]
|
||||
return flags
|
||||
|
||||
|
||||
def _to_text(flags, table, order):
|
||||
text_flags = []
|
||||
for k, v in order:
|
||||
if flags & k != 0:
|
||||
text_flags.append(v)
|
||||
return ' '.join(text_flags)
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert a space-separated list of flag text values into a flags
|
||||
value.
|
||||
@rtype: int"""
|
||||
|
||||
return _from_text(text, _by_text)
|
||||
|
||||
|
||||
def to_text(flags):
|
||||
"""Convert a flags value into a space-separated list of flag text
|
||||
values.
|
||||
@rtype: string"""
|
||||
|
||||
return _to_text(flags, _by_value, _flags_order)
|
||||
|
||||
|
||||
def edns_from_text(text):
|
||||
"""Convert a space-separated list of EDNS flag text values into a EDNS
|
||||
flags value.
|
||||
@rtype: int"""
|
||||
|
||||
return _from_text(text, _edns_by_text)
|
||||
|
||||
|
||||
def edns_to_text(flags):
|
||||
"""Convert an EDNS flags value into a space-separated list of EDNS flag
|
||||
text values.
|
||||
@rtype: string"""
|
||||
|
||||
return _to_text(flags, _edns_by_value, _edns_flags_order)
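A quick sketch of the text/value conversions above (assuming the module is importable as dns.flags):

import dns.flags

f = dns.flags.from_text('QR AA RD RA')
assert f == dns.flags.QR | dns.flags.AA | dns.flags.RD | dns.flags.RA
print(dns.flags.to_text(f))                   # QR AA RD RA
print(dns.flags.edns_to_text(dns.flags.DO))   # DO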
|
lib/dns/grange.py (new file, 65 lines added)
@@ -0,0 +1,65 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS GENERATE range conversion."""
|
||||
|
||||
import dns
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert the text form of a range in a GENERATE statement to an
|
||||
integer.
|
||||
|
||||
@param text: the textual range
|
||||
@type text: string
|
||||
@return: The start, stop and step values.
|
||||
@rtype: tuple
|
||||
"""
|
||||
# TODO, figure out the bounds on start, stop and step.
|
||||
|
||||
step = 1
|
||||
cur = ''
|
||||
state = 0
|
||||
# state 0 1 2 3 4
|
||||
# x - y / z
|
||||
for c in text:
|
||||
if c == '-' and state == 0:
|
||||
start = int(cur)
|
||||
cur = ''
|
||||
state = 2
|
||||
elif c == '/':
|
||||
stop = int(cur)
|
||||
cur = ''
|
||||
state = 4
|
||||
elif c.isdigit():
|
||||
cur += c
|
||||
else:
|
||||
raise dns.exception.SyntaxError("Could not parse %s" % (c))
|
||||
|
||||
if state in (1, 3):
|
||||
raise dns.exception.SyntaxError
|
||||
|
||||
if state == 2:
|
||||
stop = int(cur)
|
||||
|
||||
if state == 4:
|
||||
step = int(cur)
|
||||
|
||||
assert step >= 1
|
||||
assert start >= 0
|
||||
assert start <= stop
|
||||
# TODO, can start == stop?
|
||||
|
||||
return (start, stop, step)
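A sketch of the $GENERATE range parser above (assuming the module is importable as dns.grange):

import dns.grange

print(dns.grange.from_text('1-100'))      # (1, 100, 1)
print(dns.grange.from_text('32-158/16'))  # (32, 158, 16)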
|
lib/dns/hash.py (new file, 32 lines added)
@@ -0,0 +1,32 @@
|
||||
# Copyright (C) 2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Hashing backwards compatibility wrapper"""
|
||||
|
||||
import sys
|
||||
import hashlib
|
||||
|
||||
|
||||
hashes = {}
|
||||
hashes['MD5'] = hashlib.md5
|
||||
hashes['SHA1'] = hashlib.sha1
|
||||
hashes['SHA224'] = hashlib.sha224
|
||||
hashes['SHA256'] = hashlib.sha256
|
||||
hashes['SHA384'] = hashlib.sha384
|
||||
hashes['SHA512'] = hashlib.sha512
|
||||
|
||||
|
||||
def get(algorithm):
|
||||
return hashes[algorithm.upper()]
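A one-line sketch of the wrapper above (assuming the module is importable as dns.hash):

import dns.hash

print(dns.hash.get('SHA256')(b'example').hexdigest())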
|
lib/dns/inet.py (new file, 111 lines added)
@@ -0,0 +1,111 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Generic Internet address helper functions."""
|
||||
|
||||
import socket
|
||||
|
||||
import dns.ipv4
|
||||
import dns.ipv6
|
||||
|
||||
|
||||
# We assume that AF_INET is always defined.
|
||||
|
||||
AF_INET = socket.AF_INET
|
||||
|
||||
# AF_INET6 might not be defined in the socket module, but we need it.
|
||||
# We'll try to use the socket module's value, and if it doesn't work,
|
||||
# we'll use our own value.
|
||||
|
||||
try:
|
||||
AF_INET6 = socket.AF_INET6
|
||||
except AttributeError:
|
||||
AF_INET6 = 9999
|
||||
|
||||
|
||||
def inet_pton(family, text):
|
||||
"""Convert the textual form of a network address into its binary form.
|
||||
|
||||
@param family: the address family
|
||||
@type family: int
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@raises NotImplementedError: the address family specified is not
|
||||
implemented.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if family == AF_INET:
|
||||
return dns.ipv4.inet_aton(text)
|
||||
elif family == AF_INET6:
|
||||
return dns.ipv6.inet_aton(text)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def inet_ntop(family, address):
|
||||
"""Convert the binary form of a network address into its textual form.
|
||||
|
||||
@param family: the address family
|
||||
@type family: int
|
||||
@param address: the binary address
|
||||
@type address: string
|
||||
@raises NotImplementedError: the address family specified is not
|
||||
implemented.
|
||||
@rtype: string
|
||||
"""
|
||||
if family == AF_INET:
|
||||
return dns.ipv4.inet_ntoa(address)
|
||||
elif family == AF_INET6:
|
||||
return dns.ipv6.inet_ntoa(address)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def af_for_address(text):
|
||||
"""Determine the address family of a textual-form network address.
|
||||
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@raises ValueError: the address family cannot be determined from the input.
|
||||
@rtype: int
|
||||
"""
|
||||
try:
|
||||
dns.ipv4.inet_aton(text)
|
||||
return AF_INET
|
||||
except:
|
||||
try:
|
||||
dns.ipv6.inet_aton(text)
|
||||
return AF_INET6
|
||||
except:
|
||||
raise ValueError
|
||||
|
||||
|
||||
def is_multicast(text):
|
||||
"""Is the textual-form network address a multicast address?
|
||||
|
||||
@param text: the textual address
|
||||
@raises ValueError: the address family cannot be determined from the input.
|
||||
@rtype: bool
|
||||
"""
|
||||
try:
|
||||
first = ord(dns.ipv4.inet_aton(text)[0])
|
||||
return (first >= 224 and first <= 239)
|
||||
except:
|
||||
try:
|
||||
first = ord(dns.ipv6.inet_aton(text)[0])
|
||||
return (first == 255)
|
||||
except:
|
||||
raise ValueError
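A sketch of the family-agnostic helpers above (assuming the module is importable as dns.inet):

import dns.inet

print(dns.inet.af_for_address('192.0.2.1') == dns.inet.AF_INET)      # True
print(dns.inet.af_for_address('2001:db8::1') == dns.inet.AF_INET6)   # True
wire = dns.inet.inet_pton(dns.inet.AF_INET6, '2001:db8::1')
print(dns.inet.inet_ntop(dns.inet.AF_INET6, wire))                   # b'2001:db8::1'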
|
lib/dns/ipv4.py (new file, 59 lines added)
@@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""IPv4 helper functions."""
|
||||
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
from ._compat import binary_type
|
||||
|
||||
def inet_ntoa(address):
|
||||
"""Convert an IPv4 address in network form to text form.
|
||||
|
||||
@param address: The IPv4 address
|
||||
@type address: string
|
||||
@returns: string
|
||||
"""
|
||||
if len(address) != 4:
|
||||
raise dns.exception.SyntaxError
|
||||
if not isinstance(address, bytearray):
|
||||
address = bytearray(address)
|
||||
return (u'%u.%u.%u.%u' % (address[0], address[1],
|
||||
address[2], address[3])).encode()
|
||||
|
||||
def inet_aton(text):
|
||||
"""Convert an IPv4 address in text form to network form.
|
||||
|
||||
@param text: The IPv4 address
|
||||
@type text: string
|
||||
@returns: string
|
||||
"""
|
||||
if not isinstance(text, binary_type):
|
||||
text = text.encode()
|
||||
parts = text.split(b'.')
|
||||
if len(parts) != 4:
|
||||
raise dns.exception.SyntaxError
|
||||
for part in parts:
|
||||
if not part.isdigit():
|
||||
raise dns.exception.SyntaxError
|
||||
if len(part) > 1 and part[0] == '0':
|
||||
# No leading zeros
|
||||
raise dns.exception.SyntaxError
|
||||
try:
|
||||
bytes = [int(part) for part in parts]
|
||||
return struct.pack('BBBB', *bytes)
|
||||
except:
|
||||
raise dns.exception.SyntaxError
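A sketch of the two IPv4 converters above (assuming the module is importable as dns.ipv4):

import dns.ipv4

wire = dns.ipv4.inet_aton('192.0.2.1')
print(repr(wire))                 # b'\xc0\x00\x02\x01'
print(dns.ipv4.inet_ntoa(wire))   # b'192.0.2.1' (bytes in this version)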
|
lib/dns/ipv6.py (new file, 172 lines added)
@@ -0,0 +1,172 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""IPv6 helper functions."""
|
||||
|
||||
import re
|
||||
import binascii
|
||||
|
||||
import dns.exception
|
||||
import dns.ipv4
|
||||
from ._compat import xrange, binary_type
|
||||
|
||||
_leading_zero = re.compile(b'0+([0-9a-f]+)')
|
||||
|
||||
def inet_ntoa(address):
|
||||
"""Convert a network format IPv6 address into text.
|
||||
|
||||
@param address: the binary address
|
||||
@type address: string
|
||||
@rtype: string
|
||||
@raises ValueError: the address isn't 16 bytes long
|
||||
"""
|
||||
|
||||
if len(address) != 16:
|
||||
raise ValueError("IPv6 addresses are 16 bytes long")
|
||||
hex = binascii.hexlify(address)
|
||||
chunks = []
|
||||
i = 0
|
||||
l = len(hex)
|
||||
while i < l:
|
||||
chunk = hex[i : i + 4]
|
||||
# strip leading zeros. we do this with an re instead of
|
||||
# with lstrip() because lstrip() didn't support chars until
|
||||
# python 2.2.2
|
||||
m = _leading_zero.match(chunk)
|
||||
if not m is None:
|
||||
chunk = m.group(1)
|
||||
chunks.append(chunk)
|
||||
i += 4
|
||||
#
|
||||
# Compress the longest subsequence of 0-value chunks to ::
|
||||
#
|
||||
best_start = 0
|
||||
best_len = 0
|
||||
start = -1
|
||||
last_was_zero = False
|
||||
for i in xrange(8):
|
||||
if chunks[i] != b'0':
|
||||
if last_was_zero:
|
||||
end = i
|
||||
current_len = end - start
|
||||
if current_len > best_len:
|
||||
best_start = start
|
||||
best_len = current_len
|
||||
last_was_zero = False
|
||||
elif not last_was_zero:
|
||||
start = i
|
||||
last_was_zero = True
|
||||
if last_was_zero:
|
||||
end = 8
|
||||
current_len = end - start
|
||||
if current_len > best_len:
|
||||
best_start = start
|
||||
best_len = current_len
|
||||
if best_len > 1:
|
||||
if best_start == 0 and \
|
||||
(best_len == 6 or
|
||||
best_len == 5 and chunks[5] == b'ffff'):
|
||||
# We have an embedded IPv4 address
|
||||
if best_len == 6:
|
||||
prefix = b'::'
|
||||
else:
|
||||
prefix = b'::ffff:'
|
||||
hex = prefix + dns.ipv4.inet_ntoa(address[12:])
|
||||
else:
|
||||
hex = b':'.join(chunks[:best_start]) + b'::' + \
|
||||
b':'.join(chunks[best_start + best_len:])
|
||||
else:
|
||||
hex = b':'.join(chunks)
|
||||
return hex
|
||||
|
||||
_v4_ending = re.compile(b'(.*):(\d+\.\d+\.\d+\.\d+)$')
|
||||
_colon_colon_start = re.compile(b'::.*')
|
||||
_colon_colon_end = re.compile(b'.*::$')
|
||||
|
||||
def inet_aton(text):
|
||||
"""Convert a text format IPv6 address into network format.
|
||||
|
||||
@param text: the textual address
|
||||
@type text: string
|
||||
@rtype: string
|
||||
@raises dns.exception.SyntaxError: the text was not properly formatted
|
||||
"""
|
||||
|
||||
#
|
||||
# Our aim here is not something fast; we just want something that works.
|
||||
#
|
||||
if not isinstance(text, binary_type):
|
||||
text = text.encode()
|
||||
|
||||
if text == b'::':
|
||||
text = b'0::'
|
||||
#
|
||||
# Get rid of the icky dot-quad syntax if we have it.
|
||||
#
|
||||
m = _v4_ending.match(text)
|
||||
if not m is None:
|
||||
b = bytearray(dns.ipv4.inet_aton(m.group(2)))
|
||||
text = (u"%s:%02x%02x:%02x%02x" % (m.group(1).decode(), b[0], b[1],
|
||||
b[2], b[3])).encode()
|
||||
#
|
||||
# Try to turn '::<whatever>' into ':<whatever>'; if no match try to
|
||||
# turn '<whatever>::' into '<whatever>:'
|
||||
#
|
||||
m = _colon_colon_start.match(text)
|
||||
if not m is None:
|
||||
text = text[1:]
|
||||
else:
|
||||
m = _colon_colon_end.match(text)
|
||||
if not m is None:
|
||||
text = text[:-1]
|
||||
#
|
||||
# Now canonicalize into 8 chunks of 4 hex digits each
|
||||
#
|
||||
chunks = text.split(b':')
|
||||
l = len(chunks)
|
||||
if l > 8:
|
||||
raise dns.exception.SyntaxError
|
||||
seen_empty = False
|
||||
canonical = []
|
||||
for c in chunks:
|
||||
if c == b'':
|
||||
if seen_empty:
|
||||
raise dns.exception.SyntaxError
|
||||
seen_empty = True
|
||||
for i in xrange(0, 8 - l + 1):
|
||||
canonical.append(b'0000')
|
||||
else:
|
||||
lc = len(c)
|
||||
if lc > 4:
|
||||
raise dns.exception.SyntaxError
|
||||
if lc != 4:
|
||||
c = (b'0' * (4 - lc)) + c
|
||||
canonical.append(c)
|
||||
if l < 8 and not seen_empty:
|
||||
raise dns.exception.SyntaxError
|
||||
text = b''.join(canonical)
|
||||
|
||||
#
|
||||
# Finally we can go to binary.
|
||||
#
|
||||
try:
|
||||
return binascii.unhexlify(text)
|
||||
except (binascii.Error, TypeError):
|
||||
raise dns.exception.SyntaxError
|
||||
|
||||
_mapped_prefix = b'\x00' * 10 + b'\xff\xff'
|
||||
|
||||
def is_mapped(address):
|
||||
return address.startswith(_mapped_prefix)
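A sketch of the IPv6 converters above, including the :: compression and the mapped-address check (assuming the module is importable as dns.ipv6):

import dns.ipv6

wire = dns.ipv6.inet_aton('2001:0db8:0000:0000:0000:0000:0000:0001')
print(dns.ipv6.inet_ntoa(wire))                                     # b'2001:db8::1'
print(dns.ipv6.is_mapped(dns.ipv6.inet_aton('::ffff:192.0.2.1')))   # True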
|
lib/dns/message.py (new file, 1153 lines added)
lib/dns/name.py (new file, 763 lines added)
@@ -0,0 +1,763 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Names.
|
||||
|
||||
@var root: The DNS root name.
|
||||
@type root: dns.name.Name object
|
||||
@var empty: The empty DNS name.
|
||||
@type empty: dns.name.Name object
|
||||
"""
|
||||
|
||||
from io import BytesIO
|
||||
import struct
|
||||
import sys
|
||||
import copy
|
||||
import encodings.idna
|
||||
|
||||
import dns.exception
|
||||
import dns.wiredata
|
||||
|
||||
from ._compat import long, binary_type, text_type, unichr
|
||||
|
||||
try:
|
||||
maxint = sys.maxint
|
||||
except:
|
||||
maxint = (1 << (8 * struct.calcsize("P"))) / 2 - 1
|
||||
|
||||
NAMERELN_NONE = 0
|
||||
NAMERELN_SUPERDOMAIN = 1
|
||||
NAMERELN_SUBDOMAIN = 2
|
||||
NAMERELN_EQUAL = 3
|
||||
NAMERELN_COMMONANCESTOR = 4
|
||||
|
||||
|
||||
class EmptyLabel(dns.exception.SyntaxError):
|
||||
|
||||
"""A DNS label is empty."""
|
||||
|
||||
|
||||
class BadEscape(dns.exception.SyntaxError):
|
||||
|
||||
"""An escaped code in a text format of DNS name is invalid."""
|
||||
|
||||
|
||||
class BadPointer(dns.exception.FormError):
|
||||
|
||||
"""A DNS compression pointer points forward instead of backward."""
|
||||
|
||||
|
||||
class BadLabelType(dns.exception.FormError):
|
||||
|
||||
"""The label type in DNS name wire format is unknown."""
|
||||
|
||||
|
||||
class NeedAbsoluteNameOrOrigin(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to convert a non-absolute name to
|
||||
wire when there was also a non-absolute (or missing) origin."""
|
||||
|
||||
|
||||
class NameTooLong(dns.exception.FormError):
|
||||
|
||||
"""A DNS name is > 255 octets long."""
|
||||
|
||||
|
||||
class LabelTooLong(dns.exception.SyntaxError):
|
||||
|
||||
"""A DNS label is > 63 octets long."""
|
||||
|
||||
|
||||
class AbsoluteConcatenation(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to append anything other than the
|
||||
empty name to an absolute DNS name."""
|
||||
|
||||
|
||||
class NoParent(dns.exception.DNSException):
|
||||
|
||||
"""An attempt was made to get the parent of the root name
|
||||
or the empty name."""
|
||||
|
||||
_escaped = bytearray(b'"().;\\@$')
|
||||
|
||||
|
||||
def _escapify(label, unicode_mode=False):
|
||||
"""Escape the characters in label which need it.
|
||||
@param unicode_mode: escapify only special and whitespace (<= 0x20)
|
||||
characters
|
||||
@returns: the escaped string
|
||||
@rtype: string"""
|
||||
if not unicode_mode:
|
||||
text = ''
|
||||
if isinstance(label, text_type):
|
||||
label = label.encode()
|
||||
for c in bytearray(label):
|
||||
packed = struct.pack('!B', c).decode()
|
||||
if c in _escaped:
|
||||
text += '\\' + packed
|
||||
elif c > 0x20 and c < 0x7F:
|
||||
text += packed
|
||||
else:
|
||||
text += '\\%03d' % c
|
||||
return text.encode()
|
||||
|
||||
text = u''
|
||||
if isinstance(label, binary_type):
|
||||
label = label.decode()
|
||||
for c in label:
|
||||
if c > u'\x20' and c < u'\x7f':
|
||||
text += c
|
||||
else:
|
||||
if c >= u'\x7f':
|
||||
text += c
|
||||
else:
|
||||
text += u'\\%03d' % c
|
||||
return text
|
||||
|
||||
|
||||
def _validate_labels(labels):
|
||||
"""Check for empty labels in the middle of a label sequence,
|
||||
labels that are too long, and for too many labels.
|
||||
@raises NameTooLong: the name as a whole is too long
|
||||
@raises EmptyLabel: a label is empty (i.e. the root label) and appears
|
||||
in a position other than the end of the label sequence"""
|
||||
|
||||
l = len(labels)
|
||||
total = 0
|
||||
i = -1
|
||||
j = 0
|
||||
for label in labels:
|
||||
ll = len(label)
|
||||
total += ll + 1
|
||||
if ll > 63:
|
||||
raise LabelTooLong
|
||||
if i < 0 and label == b'':
|
||||
i = j
|
||||
j += 1
|
||||
if total > 255:
|
||||
raise NameTooLong
|
||||
if i >= 0 and i != l - 1:
|
||||
raise EmptyLabel
|
||||
|
||||
|
||||
def _ensure_bytes(label):
|
||||
if isinstance(label, binary_type):
|
||||
return label
|
||||
if isinstance(label, text_type):
|
||||
return label.encode()
|
||||
raise ValueError
|
||||
|
||||
|
||||
class Name(object):
|
||||
|
||||
"""A DNS name.
|
||||
|
||||
The dns.name.Name class represents a DNS name as a tuple of labels.
|
||||
Instances of the class are immutable.
|
||||
|
||||
@ivar labels: The tuple of labels in the name. Each label is a string of
|
||||
up to 63 octets."""
|
||||
|
||||
__slots__ = ['labels']
|
||||
|
||||
def __init__(self, labels):
|
||||
"""Initialize a domain name from a list of labels.
|
||||
@param labels: the labels
|
||||
@type labels: any iterable whose values are strings
|
||||
"""
|
||||
labels = [_ensure_bytes(x) for x in labels]
|
||||
super(Name, self).__setattr__('labels', tuple(labels))
|
||||
_validate_labels(self.labels)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
raise TypeError("object doesn't support attribute assignment")
|
||||
|
||||
def __copy__(self):
|
||||
return Name(self.labels)
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
return Name(copy.deepcopy(self.labels, memo))
|
||||
|
||||
def __getstate__(self):
|
||||
return {'labels': self.labels}
|
||||
|
||||
def __setstate__(self, state):
|
||||
super(Name, self).__setattr__('labels', state['labels'])
|
||||
_validate_labels(self.labels)
|
||||
|
||||
def is_absolute(self):
|
||||
"""Is the most significant label of this name the root label?
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
return len(self.labels) > 0 and self.labels[-1] == b''
|
||||
|
||||
def is_wild(self):
|
||||
"""Is this name wild? (I.e. Is the least significant label '*'?)
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
return len(self.labels) > 0 and self.labels[0] == b'*'
|
||||
|
||||
def __hash__(self):
|
||||
"""Return a case-insensitive hash of the name.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
h = long(0)
|
||||
for label in self.labels:
|
||||
for c in bytearray(label.lower()):
|
||||
h += (h << 3) + c
|
||||
return int(h % maxint)
|
||||
|
||||
def fullcompare(self, other):
|
||||
"""Compare two names, returning a 3-tuple (relation, order, nlabels).
|
||||
|
||||
I{relation} describes the relationship between the names,
|
||||
and is one of: dns.name.NAMERELN_NONE,
|
||||
dns.name.NAMERELN_SUPERDOMAIN, dns.name.NAMERELN_SUBDOMAIN,
|
||||
dns.name.NAMERELN_EQUAL, or dns.name.NAMERELN_COMMONANCESTOR
|
||||
|
||||
I{order} is < 0 if self < other, > 0 if self > other, and ==
|
||||
0 if self == other. A relative name is always less than an
|
||||
absolute name. If both names have the same relativity, then
|
||||
the DNSSEC order relation is used to order them.
|
||||
|
||||
I{nlabels} is the number of significant labels that the two names
|
||||
have in common.
|
||||
"""
|
||||
|
||||
sabs = self.is_absolute()
|
||||
oabs = other.is_absolute()
|
||||
if sabs != oabs:
|
||||
if sabs:
|
||||
return (NAMERELN_NONE, 1, 0)
|
||||
else:
|
||||
return (NAMERELN_NONE, -1, 0)
|
||||
l1 = len(self.labels)
|
||||
l2 = len(other.labels)
|
||||
ldiff = l1 - l2
|
||||
if ldiff < 0:
|
||||
l = l1
|
||||
else:
|
||||
l = l2
|
||||
|
||||
order = 0
|
||||
nlabels = 0
|
||||
namereln = NAMERELN_NONE
|
||||
while l > 0:
|
||||
l -= 1
|
||||
l1 -= 1
|
||||
l2 -= 1
|
||||
label1 = self.labels[l1].lower()
|
||||
label2 = other.labels[l2].lower()
|
||||
if label1 < label2:
|
||||
order = -1
|
||||
if nlabels > 0:
|
||||
namereln = NAMERELN_COMMONANCESTOR
|
||||
return (namereln, order, nlabels)
|
||||
elif label1 > label2:
|
||||
order = 1
|
||||
if nlabels > 0:
|
||||
namereln = NAMERELN_COMMONANCESTOR
|
||||
return (namereln, order, nlabels)
|
||||
nlabels += 1
|
||||
order = ldiff
|
||||
if ldiff < 0:
|
||||
namereln = NAMERELN_SUPERDOMAIN
|
||||
elif ldiff > 0:
|
||||
namereln = NAMERELN_SUBDOMAIN
|
||||
else:
|
||||
namereln = NAMERELN_EQUAL
|
||||
return (namereln, order, nlabels)
|
||||
|
||||
def is_subdomain(self, other):
|
||||
"""Is self a subdomain of other?
|
||||
|
||||
The notion of subdomain includes equality.
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
(nr, o, nl) = self.fullcompare(other)
|
||||
if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL:
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_superdomain(self, other):
|
||||
"""Is self a superdomain of other?
|
||||
|
||||
The notion of subdomain includes equality.
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
(nr, o, nl) = self.fullcompare(other)
|
||||
if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL:
|
||||
return True
|
||||
return False
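A sketch of the comparison machinery above (from_text is defined later in this file; the package is assumed importable as dns.name):

import dns.name

www = dns.name.from_text('www.example.com.')
zone = dns.name.from_text('example.com.')
print(www.is_subdomain(zone))    # True
print(zone.is_superdomain(www))  # True
print(www.fullcompare(zone))     # (NAMERELN_SUBDOMAIN, 1, 3), i.e. (2, 1, 3)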
|
||||
|
||||
def canonicalize(self):
|
||||
"""Return a name which is equal to the current name, but is in
|
||||
DNSSEC canonical form.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
return Name([x.lower() for x in self.labels])
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] == 0
|
||||
else:
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] != 0
|
||||
else:
|
||||
return True
|
||||
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] < 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __le__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] <= 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ge__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] >= 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, Name):
|
||||
return self.fullcompare(other)[1] > 0
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __repr__(self):
|
||||
return '<DNS name ' + self.__str__() + '>'
|
||||
|
||||
def __str__(self):
|
||||
return self.to_text(False).decode()
|
||||
|
||||
def to_text(self, omit_final_dot=False):
|
||||
"""Convert name to text format.
|
||||
@param omit_final_dot: If True, don't emit the final dot (denoting the
|
||||
root label) for absolute names. The default is False.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if len(self.labels) == 0:
|
||||
return b'@'
|
||||
if len(self.labels) == 1 and self.labels[0] == b'':
|
||||
return b'.'
|
||||
if omit_final_dot and self.is_absolute():
|
||||
l = self.labels[:-1]
|
||||
else:
|
||||
l = self.labels
|
||||
s = b'.'.join(map(_escapify, l))
|
||||
return s
|
||||
|
||||
def to_unicode(self, omit_final_dot=False):
|
||||
"""Convert name to Unicode text format.
|
||||
|
||||
IDN ACE labels are converted to Unicode.
|
||||
|
||||
@param omit_final_dot: If True, don't emit the final dot (denoting the
|
||||
root label) for absolute names. The default is False.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if len(self.labels) == 0:
|
||||
return u'@'
|
||||
if len(self.labels) == 1 and self.labels[0] == '':
|
||||
return u'.'
|
||||
if omit_final_dot and self.is_absolute():
|
||||
l = self.labels[:-1]
|
||||
else:
|
||||
l = self.labels
|
||||
s = u'.'.join([_escapify(encodings.idna.ToUnicode(x), True)
|
||||
for x in l])
|
||||
return s
|
||||
|
||||
def to_digestable(self, origin=None):
|
||||
"""Convert name to a format suitable for digesting in hashes.
|
||||
|
||||
The name is canonicalized and converted to uncompressed wire format.
|
||||
|
||||
@param origin: If the name is relative and origin is not None, then
|
||||
origin will be appended to it.
|
||||
@type origin: dns.name.Name object
|
||||
@raises NeedAbsoluteNameOrOrigin: All names in wire format are
|
||||
absolute. If self is a relative name, then an origin must be supplied;
|
||||
if it is missing, then this exception is raised
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if not self.is_absolute():
|
||||
if origin is None or not origin.is_absolute():
|
||||
raise NeedAbsoluteNameOrOrigin
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(origin.labels))
|
||||
else:
|
||||
labels = self.labels
|
||||
dlabels = [struct.pack('!B%ds' % len(x), len(x), x.lower())
|
||||
for x in labels]
|
||||
return b''.join(dlabels)
|
||||
|
||||
def to_wire(self, file=None, compress=None, origin=None):
|
||||
"""Convert name to wire format, possibly compressing it.
|
||||
|
||||
@param file: the file where the name is emitted (typically
|
||||
a BytesIO file). If None, a string containing the wire name
|
||||
will be returned.
|
||||
@type file: file or None
|
||||
@param compress: The compression table. If None (the default) names
|
||||
will not be compressed.
|
||||
@type compress: dict
|
||||
@param origin: If the name is relative and origin is not None, then
|
||||
origin will be appended to it.
|
||||
@type origin: dns.name.Name object
|
||||
@raises NeedAbsoluteNameOrOrigin: All names in wire format are
|
||||
absolute. If self is a relative name, then an origin must be supplied;
|
||||
if it is missing, then this exception is raised
|
||||
"""
|
||||
|
||||
if file is None:
|
||||
file = BytesIO()
|
||||
want_return = True
|
||||
else:
|
||||
want_return = False
|
||||
|
||||
if not self.is_absolute():
|
||||
if origin is None or not origin.is_absolute():
|
||||
raise NeedAbsoluteNameOrOrigin
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(origin.labels))
|
||||
else:
|
||||
labels = self.labels
|
||||
i = 0
|
||||
for label in labels:
|
||||
n = Name(labels[i:])
|
||||
i += 1
|
||||
if compress is not None:
|
||||
pos = compress.get(n)
|
||||
else:
|
||||
pos = None
|
||||
if pos is not None:
|
||||
value = 0xc000 + pos
|
||||
s = struct.pack('!H', value)
|
||||
file.write(s)
|
||||
break
|
||||
else:
|
||||
if compress is not None and len(n) > 1:
|
||||
pos = file.tell()
|
||||
if pos <= 0x3fff:
|
||||
compress[n] = pos
|
||||
l = len(label)
|
||||
file.write(struct.pack('!B', l))
|
||||
if l > 0:
|
||||
file.write(label)
|
||||
if want_return:
|
||||
return file.getvalue()
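An illustrative note on the compression scheme above: when a suffix of the name was already written at offset pos (pos <= 0x3fff), the encoder emits a two-octet pointer instead of repeating the labels.

#   s = struct.pack('!H', 0xc000 + pos)   # top two bits mark a pointer
#   # e.g. a suffix first written at offset 12 is later referenced as b'\xc0\x0c'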
|
||||
|
||||
def __len__(self):
|
||||
"""The length of the name (in labels).
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return len(self.labels)
|
||||
|
||||
def __getitem__(self, index):
|
||||
return self.labels[index]
|
||||
|
||||
def __getslice__(self, start, stop):
|
||||
return self.labels[start:stop]
|
||||
|
||||
def __add__(self, other):
|
||||
return self.concatenate(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
return self.relativize(other)
|
||||
|
||||
def split(self, depth):
|
||||
"""Split a name into a prefix and suffix at depth.
|
||||
|
||||
@param depth: the number of labels in the suffix
|
||||
@type depth: int
|
||||
@raises ValueError: the depth was not >= 0 and <= the length of the
|
||||
name.
|
||||
@returns: the tuple (prefix, suffix)
|
||||
@rtype: tuple
|
||||
"""
|
||||
|
||||
l = len(self.labels)
|
||||
if depth == 0:
|
||||
return (self, dns.name.empty)
|
||||
elif depth == l:
|
||||
return (dns.name.empty, self)
|
||||
elif depth < 0 or depth > l:
|
||||
raise ValueError(
|
||||
'depth must be >= 0 and <= the length of the name')
|
||||
return (Name(self[: -depth]), Name(self[-depth:]))
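An illustrative example of split() (the name is arbitrary):

#   n = from_text(b'www.example.com')   # labels (b'www', b'example', b'com', b'')
#   prefix, suffix = n.split(3)         # suffix keeps the last 3 labels
#   # prefix -> <www>, suffix -> <example.com.>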
|
||||
|
||||
def concatenate(self, other):
|
||||
"""Return a new name which is the concatenation of self and other.
|
||||
@rtype: dns.name.Name object
|
||||
@raises AbsoluteConcatenation: self is absolute and other is
|
||||
not the empty name
|
||||
"""
|
||||
|
||||
if self.is_absolute() and len(other) > 0:
|
||||
raise AbsoluteConcatenation
|
||||
labels = list(self.labels)
|
||||
labels.extend(list(other.labels))
|
||||
return Name(labels)
|
||||
|
||||
def relativize(self, origin):
|
||||
"""If self is a subdomain of origin, return a new name which is self
|
||||
relative to origin. Otherwise return self.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if origin is not None and self.is_subdomain(origin):
|
||||
return Name(self[: -len(origin)])
|
||||
else:
|
||||
return self
|
||||
|
||||
def derelativize(self, origin):
|
||||
"""If self is a relative name, return a new name which is the
|
||||
concatenation of self and origin. Otherwise return self.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if not self.is_absolute():
|
||||
return self.concatenate(origin)
|
||||
else:
|
||||
return self
|
||||
|
||||
def choose_relativity(self, origin=None, relativize=True):
|
||||
"""Return a name with the relativity desired by the caller. If
|
||||
origin is None, then self is returned. Otherwise, if
|
||||
relativize is true the name is relativized, and if relativize is
|
||||
false the name is derelativized.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if origin:
|
||||
if relativize:
|
||||
return self.relativize(origin)
|
||||
else:
|
||||
return self.derelativize(origin)
|
||||
else:
|
||||
return self
|
||||
|
||||
def parent(self):
|
||||
"""Return the parent of the name.
|
||||
@rtype: dns.name.Name object
|
||||
@raises NoParent: the name is either the root name or the empty name,
|
||||
and thus has no parent.
|
||||
"""
|
||||
if self == root or self == empty:
|
||||
raise NoParent
|
||||
return Name(self.labels[1:])
|
||||
|
||||
root = Name([b''])
|
||||
empty = Name([])
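A minimal usage sketch of the relativization helpers above, assuming this module is importable as dns.name:

#   origin = dns.name.from_text('example.com')   # absolute: example.com.
#   n = dns.name.from_text('www.example.com')
#   rel = n.relativize(origin)                   # relative name <www>
#   back = rel.derelativize(origin)              # absolute www.example.com. again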
|
||||
|
||||
|
||||
def from_unicode(text, origin=root):
|
||||
"""Convert unicode text into a Name object.
|
||||
|
||||
Labels are encoded in IDN ACE form.
|
||||
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if not isinstance(text, text_type):
|
||||
raise ValueError("input to from_unicode() must be a unicode string")
|
||||
if not (origin is None or isinstance(origin, Name)):
|
||||
raise ValueError("origin must be a Name or None")
|
||||
labels = []
|
||||
label = u''
|
||||
escaping = False
|
||||
edigits = 0
|
||||
total = 0
|
||||
if text == u'@':
|
||||
text = u''
|
||||
if text:
|
||||
if text == u'.':
|
||||
return Name([b'']) # no Unicode "u" on this constant!
|
||||
for c in text:
|
||||
if escaping:
|
||||
if edigits == 0:
|
||||
if c.isdigit():
|
||||
total = int(c)
|
||||
edigits += 1
|
||||
else:
|
||||
label += c
|
||||
escaping = False
|
||||
else:
|
||||
if not c.isdigit():
|
||||
raise BadEscape
|
||||
total *= 10
|
||||
total += int(c)
|
||||
edigits += 1
|
||||
if edigits == 3:
|
||||
escaping = False
|
||||
label += unichr(total)
|
||||
elif c in [u'.', u'\u3002', u'\uff0e', u'\uff61']:
|
||||
if len(label) == 0:
|
||||
raise EmptyLabel
|
||||
try:
|
||||
labels.append(encodings.idna.ToASCII(label))
|
||||
except UnicodeError:
|
||||
raise LabelTooLong
|
||||
label = u''
|
||||
elif c == u'\\':
|
||||
escaping = True
|
||||
edigits = 0
|
||||
total = 0
|
||||
else:
|
||||
label += c
|
||||
if escaping:
|
||||
raise BadEscape
|
||||
if len(label) > 0:
|
||||
try:
|
||||
labels.append(encodings.idna.ToASCII(label))
|
||||
except UnicodeError:
|
||||
raise LabelTooLong
|
||||
else:
|
||||
labels.append(b'')
|
||||
|
||||
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
|
||||
labels.extend(list(origin.labels))
|
||||
return Name(labels)
|
||||
|
||||
|
||||
def from_text(text, origin=root):
|
||||
"""Convert text into a Name object.
|
||||
@rtype: dns.name.Name object
|
||||
"""
|
||||
|
||||
if isinstance(text, text_type):
|
||||
return from_unicode(text, origin)
|
||||
if not isinstance(text, binary_type):
|
||||
raise ValueError("input to from_text() must be a string")
|
||||
if not (origin is None or isinstance(origin, Name)):
|
||||
raise ValueError("origin must be a Name or None")
|
||||
labels = []
|
||||
label = b''
|
||||
escaping = False
|
||||
edigits = 0
|
||||
total = 0
|
||||
if text == b'@':
|
||||
text = b''
|
||||
if text:
|
||||
if text == b'.':
|
||||
return Name([b''])
|
||||
for c in bytearray(text):
|
||||
byte_ = struct.pack('!B', c)
|
||||
if escaping:
|
||||
if edigits == 0:
|
||||
if byte_.isdigit():
|
||||
total = int(byte_)
|
||||
edigits += 1
|
||||
else:
|
||||
label += byte_
|
||||
escaping = False
|
||||
else:
|
||||
if not byte_.isdigit():
|
||||
raise BadEscape
|
||||
total *= 10
|
||||
total += int(byte_)
|
||||
edigits += 1
|
||||
if edigits == 3:
|
||||
escaping = False
|
||||
label += struct.pack('!B', total)
|
||||
elif byte_ == b'.':
|
||||
if len(label) == 0:
|
||||
raise EmptyLabel
|
||||
labels.append(label)
|
||||
label = b''
|
||||
elif byte_ == b'\\':
|
||||
escaping = True
|
||||
edigits = 0
|
||||
total = 0
|
||||
else:
|
||||
label += byte_
|
||||
if escaping:
|
||||
raise BadEscape
|
||||
if len(label) > 0:
|
||||
labels.append(label)
|
||||
else:
|
||||
labels.append(b'')
|
||||
if (len(labels) == 0 or labels[-1] != b'') and origin is not None:
|
||||
labels.extend(list(origin.labels))
|
||||
return Name(labels)
|
||||
|
||||
|
||||
def from_wire(message, current):
|
||||
"""Convert possibly compressed wire format into a Name.
|
||||
@param message: the entire DNS message
|
||||
@type message: string
|
||||
@param current: the offset of the beginning of the name from the start
|
||||
of the message
|
||||
@type current: int
|
||||
@raises dns.name.BadPointer: a compression pointer did not point backwards
|
||||
in the message
|
||||
@raises dns.name.BadLabelType: an invalid label type was encountered.
|
||||
@returns: a tuple consisting of the name that was read and the number
|
||||
of bytes of the wire format message which were consumed reading it
|
||||
@rtype: (dns.name.Name object, int) tuple
|
||||
"""
|
||||
|
||||
if not isinstance(message, binary_type):
|
||||
raise ValueError("input to from_wire() must be a byte string")
|
||||
message = dns.wiredata.maybe_wrap(message)
|
||||
labels = []
|
||||
biggest_pointer = current
|
||||
hops = 0
|
||||
count = message[current]
|
||||
current += 1
|
||||
cused = 1
|
||||
while count != 0:
|
||||
if count < 64:
|
||||
labels.append(message[current: current + count].unwrap())
|
||||
current += count
|
||||
if hops == 0:
|
||||
cused += count
|
||||
elif count >= 192:
|
||||
current = (count & 0x3f) * 256 + message[current]
|
||||
if hops == 0:
|
||||
cused += 1
|
||||
if current >= biggest_pointer:
|
||||
raise BadPointer
|
||||
biggest_pointer = current
|
||||
hops += 1
|
||||
else:
|
||||
raise BadLabelType
|
||||
count = message[current]
|
||||
current += 1
|
||||
if hops == 0:
|
||||
cused += 1
|
||||
labels.append('')
|
||||
return (Name(labels), cused)
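An illustrative walk-through of the pointer handling above: if the wire data at the starting offset is b'\x03www\xc0\x0c', the parser reads the 'www' label, sees the 0xc0 byte (>= 192), and jumps to offset (0xc0 & 0x3f) * 256 + 0x0c == 12; the target must be smaller than the previously recorded position (initially the starting offset), otherwise BadPointer is raised. cused counts only the six bytes consumed at the original position (1 + 3 for the label, 2 for the pointer), not the labels read at the pointer target.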
|
104
lib/dns/namedict.py
Normal file
@@ -0,0 +1,104 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
# Copyright (C) 2016 Coresec Systems AB
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL
|
||||
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC
|
||||
# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
|
||||
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
|
||||
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS name dictionary"""
|
||||
|
||||
import collections
|
||||
import dns.name
|
||||
from ._compat import xrange
|
||||
|
||||
|
||||
class NameDict(collections.MutableMapping):
|
||||
|
||||
"""A dictionary whose keys are dns.name.Name objects.
|
||||
@ivar max_depth: the maximum depth of the keys that have ever been
|
||||
added to the dictionary.
|
||||
@type max_depth: int
|
||||
@ivar max_depth_items: the number of items of maximum depth
|
||||
@type max_depth_items: int
|
||||
"""
|
||||
|
||||
__slots__ = ["max_depth", "max_depth_items", "__store"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.__store = dict()
|
||||
self.max_depth = 0
|
||||
self.max_depth_items = 0
|
||||
self.update(dict(*args, **kwargs))
|
||||
|
||||
def __update_max_depth(self, key):
|
||||
if len(key) == self.max_depth:
|
||||
self.max_depth_items = self.max_depth_items + 1
|
||||
elif len(key) > self.max_depth:
|
||||
self.max_depth = len(key)
|
||||
self.max_depth_items = 1
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.__store[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if not isinstance(key, dns.name.Name):
|
||||
raise ValueError('NameDict key must be a name')
|
||||
self.__store[key] = value
|
||||
self.__update_max_depth(key)
|
||||
|
||||
def __delitem__(self, key):
|
||||
value = self.__store.pop(key)
|
||||
if len(value) == self.max_depth:
|
||||
self.max_depth_items = self.max_depth_items - 1
|
||||
if self.max_depth_items == 0:
|
||||
self.max_depth = 0
|
||||
for k in self.__store:
|
||||
self.__update_max_depth(k)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__store)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.__store)
|
||||
|
||||
def has_key(self, key):
|
||||
return key in self.__store
|
||||
|
||||
def get_deepest_match(self, name):
|
||||
"""Find the deepest match to I{name} in the dictionary.
|
||||
|
||||
The deepest match is the longest name in the dictionary which is
|
||||
a superdomain of I{name}.
|
||||
|
||||
@param name: the name
|
||||
@type name: dns.name.Name object
|
||||
@rtype: (key, value) tuple
|
||||
"""
|
||||
|
||||
depth = len(name)
|
||||
if depth > self.max_depth:
|
||||
depth = self.max_depth
|
||||
for i in xrange(-depth, 0):
|
||||
n = dns.name.Name(name[i:])
|
||||
if n in self:
|
||||
return (n, self[n])
|
||||
v = self[dns.name.empty]
|
||||
return (dns.name.empty, v)
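A usage sketch (key names chosen arbitrarily):

#   d = NameDict()
#   d[dns.name.from_text('example.com')] = 'zone'
#   d[dns.name.empty] = 'default'
#   d.get_deepest_match(dns.name.from_text('www.example.com'))   # -> (example.com., 'zone')
#   d.get_deepest_match(dns.name.from_text('other.test'))        # -> (empty, 'default')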
|
178
lib/dns/node.py
Normal file
@@ -0,0 +1,178 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS nodes. A node is a set of rdatasets."""
|
||||
|
||||
from io import StringIO
|
||||
|
||||
import dns.rdataset
|
||||
import dns.rdatatype
|
||||
import dns.renderer
|
||||
|
||||
|
||||
class Node(object):
|
||||
|
||||
"""A DNS node.
|
||||
|
||||
A node is a set of rdatasets
|
||||
|
||||
@ivar rdatasets: the node's rdatasets
|
||||
@type rdatasets: list of dns.rdataset.Rdataset objects"""
|
||||
|
||||
__slots__ = ['rdatasets']
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize a DNS node.
|
||||
"""
|
||||
|
||||
self.rdatasets = []
|
||||
|
||||
def to_text(self, name, **kw):
|
||||
"""Convert a node to text format.
|
||||
|
||||
Each rdataset at the node is printed. Any keyword arguments
|
||||
to this method are passed on to the rdataset's to_text() method.
|
||||
@param name: the owner name of the rdatasets
|
||||
@type name: dns.name.Name object
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
s = StringIO()
|
||||
for rds in self.rdatasets:
|
||||
if len(rds) > 0:
|
||||
s.write(rds.to_text(name, **kw))
|
||||
s.write(u'\n')
|
||||
return s.getvalue()[:-1]
|
||||
|
||||
def __repr__(self):
|
||||
return '<DNS node ' + str(id(self)) + '>'
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Two nodes are equal if they have the same rdatasets.
|
||||
|
||||
@rtype: bool
|
||||
"""
|
||||
#
|
||||
# This is inefficient. Good thing we don't need to do it much.
|
||||
#
|
||||
for rd in self.rdatasets:
|
||||
if rd not in other.rdatasets:
|
||||
return False
|
||||
for rd in other.rdatasets:
|
||||
if rd not in self.rdatasets:
|
||||
return False
|
||||
return True
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.rdatasets)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.rdatasets)
|
||||
|
||||
def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
|
||||
create=False):
|
||||
"""Find an rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type. Usually this value is
|
||||
dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
|
||||
dns.rdatatype.RRSIG, then the covers value will be the rdata
|
||||
type the SIG/RRSIG covers. The library treats the SIG and RRSIG
|
||||
types as if they were a family of
|
||||
types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
|
||||
easier to work with than if RRSIGs covering different rdata
|
||||
types were aggregated into a single RRSIG rdataset.
|
||||
@type covers: int
|
||||
@param create: If True, create the rdataset if it is not found.
|
||||
@type create: bool
|
||||
@raises KeyError: An rdataset of the desired type and class does
|
||||
not exist and I{create} is not True.
|
||||
@rtype: dns.rdataset.Rdataset object
|
||||
"""
|
||||
|
||||
for rds in self.rdatasets:
|
||||
if rds.match(rdclass, rdtype, covers):
|
||||
return rds
|
||||
if not create:
|
||||
raise KeyError
|
||||
rds = dns.rdataset.Rdataset(rdclass, rdtype)
|
||||
self.rdatasets.append(rds)
|
||||
return rds
|
||||
|
||||
def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
|
||||
create=False):
|
||||
"""Get an rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
None is returned if an rdataset of the specified type and
|
||||
class does not exist and I{create} is not True.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type.
|
||||
@type covers: int
|
||||
@param create: If True, create the rdataset if it is not found.
|
||||
@type create: bool
|
||||
@rtype: dns.rdataset.Rdataset object or None
|
||||
"""
|
||||
|
||||
try:
|
||||
rds = self.find_rdataset(rdclass, rdtype, covers, create)
|
||||
except KeyError:
|
||||
rds = None
|
||||
return rds
|
||||
|
||||
def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
|
||||
"""Delete the rdataset matching the specified properties in the
|
||||
current node.
|
||||
|
||||
If a matching rdataset does not exist, it is not an error.
|
||||
|
||||
@param rdclass: The class of the rdataset
|
||||
@type rdclass: int
|
||||
@param rdtype: The type of the rdataset
|
||||
@type rdtype: int
|
||||
@param covers: The covered type.
|
||||
@type covers: int
|
||||
"""
|
||||
|
||||
rds = self.get_rdataset(rdclass, rdtype, covers)
|
||||
if rds is not None:
|
||||
self.rdatasets.remove(rds)
|
||||
|
||||
def replace_rdataset(self, replacement):
|
||||
"""Replace an rdataset.
|
||||
|
||||
It is not an error if there is no rdataset matching I{replacement}.
|
||||
|
||||
Ownership of the I{replacement} object is transferred to the node;
|
||||
in other words, this method does not store a copy of I{replacement}
|
||||
at the node, it stores I{replacement} itself.
|
||||
"""
|
||||
|
||||
if not isinstance(replacement, dns.rdataset.Rdataset):
|
||||
raise ValueError('replacement is not an rdataset')
|
||||
self.delete_rdataset(replacement.rdclass, replacement.rdtype,
|
||||
replacement.covers)
|
||||
self.rdatasets.append(replacement)
|
109
lib/dns/opcode.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Opcodes."""
|
||||
|
||||
import dns.exception
|
||||
|
||||
QUERY = 0
|
||||
IQUERY = 1
|
||||
STATUS = 2
|
||||
NOTIFY = 4
|
||||
UPDATE = 5
|
||||
|
||||
_by_text = {
|
||||
'QUERY': QUERY,
|
||||
'IQUERY': IQUERY,
|
||||
'STATUS': STATUS,
|
||||
'NOTIFY': NOTIFY,
|
||||
'UPDATE': UPDATE
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
|
||||
class UnknownOpcode(dns.exception.DNSException):
|
||||
|
||||
"""An DNS opcode is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into an opcode.
|
||||
|
||||
@param text: the textual opcode
|
||||
@type text: string
|
||||
@raises UnknownOpcode: the opcode is unknown
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
if text.isdigit():
|
||||
value = int(text)
|
||||
if value >= 0 and value <= 15:
|
||||
return value
|
||||
value = _by_text.get(text.upper())
|
||||
if value is None:
|
||||
raise UnknownOpcode
|
||||
return value
|
||||
|
||||
|
||||
def from_flags(flags):
|
||||
"""Extract an opcode from DNS message flags.
|
||||
|
||||
@param flags: int
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return (flags & 0x7800) >> 11
|
||||
|
||||
|
||||
def to_flags(value):
|
||||
"""Convert an opcode to a value suitable for ORing into DNS message
|
||||
flags.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return (value << 11) & 0x7800
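A worked example of the bit layout assumed above (the opcode occupies bits 11-14 of the DNS header flags word):

#   to_flags(UPDATE)   == 0x2800   # 5 << 11
#   from_flags(0x2800) == UPDATE   # (0x2800 & 0x7800) >> 11 == 5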
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert an opcode to text.
|
||||
|
||||
@param value: the opcode
|
||||
@type value: int
|
||||
@raises UnknownOpcode: the opcode is unknown
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
|
||||
|
||||
|
||||
def is_update(flags):
|
||||
"""True if the opcode in flags is UPDATE.
|
||||
|
||||
@param flags: DNS flags
|
||||
@type flags: int
|
||||
@rtype: bool
|
||||
"""
|
||||
|
||||
if (from_flags(flags) == UPDATE):
|
||||
return True
|
||||
return False
|
536
lib/dns/query.py
Normal file
@@ -0,0 +1,536 @@
|
||||
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""Talk to a DNS server."""
|
||||
|
||||
from __future__ import generators
|
||||
|
||||
import errno
|
||||
import select
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
import time
|
||||
|
||||
import dns.exception
|
||||
import dns.inet
|
||||
import dns.name
|
||||
import dns.message
|
||||
import dns.rdataclass
|
||||
import dns.rdatatype
|
||||
from ._compat import long, string_types
|
||||
|
||||
if sys.version_info > (3,):
|
||||
select_error = OSError
|
||||
else:
|
||||
select_error = select.error
|
||||
|
||||
|
||||
class UnexpectedSource(dns.exception.DNSException):
|
||||
|
||||
"""A DNS query response came from an unexpected address or port."""
|
||||
|
||||
|
||||
class BadResponse(dns.exception.FormError):
|
||||
|
||||
"""A DNS query response does not respond to the question asked."""
|
||||
|
||||
|
||||
def _compute_expiration(timeout):
|
||||
if timeout is None:
|
||||
return None
|
||||
else:
|
||||
return time.time() + timeout
|
||||
|
||||
|
||||
def _poll_for(fd, readable, writable, error, timeout):
|
||||
"""Poll polling backend.
|
||||
@param fd: File descriptor
|
||||
@type fd: int
|
||||
@param readable: Whether to wait for readability
|
||||
@type readable: bool
|
||||
@param writable: Whether to wait for writability
|
||||
@type writable: bool
|
||||
@param timeout: Deadline timeout (expiration time, in seconds)
|
||||
@type timeout: float
|
||||
@return True on success, False on timeout
|
||||
"""
|
||||
event_mask = 0
|
||||
if readable:
|
||||
event_mask |= select.POLLIN
|
||||
if writable:
|
||||
event_mask |= select.POLLOUT
|
||||
if error:
|
||||
event_mask |= select.POLLERR
|
||||
|
||||
pollable = select.poll()
|
||||
pollable.register(fd, event_mask)
|
||||
|
||||
if timeout:
|
||||
event_list = pollable.poll(long(timeout * 1000))
|
||||
else:
|
||||
event_list = pollable.poll()
|
||||
|
||||
return bool(event_list)
|
||||
|
||||
|
||||
def _select_for(fd, readable, writable, error, timeout):
|
||||
"""Select polling backend.
|
||||
@param fd: File descriptor
|
||||
@type fd: int
|
||||
@param readable: Whether to wait for readability
|
||||
@type readable: bool
|
||||
@param writable: Whether to wait for writability
|
||||
@type writable: bool
|
||||
@param timeout: Deadline timeout (expiration time, in seconds)
|
||||
@type timeout: float
|
||||
@return True on success, False on timeout
|
||||
"""
|
||||
rset, wset, xset = [], [], []
|
||||
|
||||
if readable:
|
||||
rset = [fd]
|
||||
if writable:
|
||||
wset = [fd]
|
||||
if error:
|
||||
xset = [fd]
|
||||
|
||||
if timeout is None:
|
||||
(rcount, wcount, xcount) = select.select(rset, wset, xset)
|
||||
else:
|
||||
(rcount, wcount, xcount) = select.select(rset, wset, xset, timeout)
|
||||
|
||||
return bool((rcount or wcount or xcount))
|
||||
|
||||
|
||||
def _wait_for(fd, readable, writable, error, expiration):
|
||||
done = False
|
||||
while not done:
|
||||
if expiration is None:
|
||||
timeout = None
|
||||
else:
|
||||
timeout = expiration - time.time()
|
||||
if timeout <= 0.0:
|
||||
raise dns.exception.Timeout
|
||||
try:
|
||||
if not _polling_backend(fd, readable, writable, error, timeout):
|
||||
raise dns.exception.Timeout
|
||||
except select_error as e:
|
||||
if e.args[0] != errno.EINTR:
|
||||
raise e
|
||||
done = True
|
||||
|
||||
|
||||
def _set_polling_backend(fn):
|
||||
"""
|
||||
Internal API. Do not use.
|
||||
"""
|
||||
global _polling_backend
|
||||
|
||||
_polling_backend = fn
|
||||
|
||||
if hasattr(select, 'poll'):
|
||||
# Prefer poll() on platforms that support it because it has no
|
||||
# limits on the maximum value of a file descriptor (plus it will
|
||||
# be more efficient for high values).
|
||||
_polling_backend = _poll_for
|
||||
else:
|
||||
_polling_backend = _select_for
|
||||
|
||||
|
||||
def _wait_for_readable(s, expiration):
|
||||
_wait_for(s, True, False, True, expiration)
|
||||
|
||||
|
||||
def _wait_for_writable(s, expiration):
|
||||
_wait_for(s, False, True, True, expiration)
|
||||
|
||||
|
||||
def _addresses_equal(af, a1, a2):
|
||||
# Convert the first value of the tuple, which is a textual format
|
||||
# address, into binary form, so that we are not confused by different
|
||||
# textual representations of the same address
|
||||
n1 = dns.inet.inet_pton(af, a1[0])
|
||||
n2 = dns.inet.inet_pton(af, a2[0])
|
||||
return n1 == n2 and a1[1:] == a2[1:]
|
||||
|
||||
|
||||
def _destination_and_source(af, where, port, source, source_port):
|
||||
# Apply defaults and compute destination and source tuples
|
||||
# suitable for use in connect(), sendto(), or bind().
|
||||
if af is None:
|
||||
try:
|
||||
af = dns.inet.af_for_address(where)
|
||||
except:
|
||||
af = dns.inet.AF_INET
|
||||
if af == dns.inet.AF_INET:
|
||||
destination = (where, port)
|
||||
if source is not None or source_port != 0:
|
||||
if source is None:
|
||||
source = '0.0.0.0'
|
||||
source = (source, source_port)
|
||||
elif af == dns.inet.AF_INET6:
|
||||
destination = (where, port, 0, 0)
|
||||
if source is not None or source_port != 0:
|
||||
if source is None:
|
||||
source = '::'
|
||||
source = (source, source_port, 0, 0)
|
||||
return (af, destination, source)
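An illustrative example of the defaulting performed above (addresses are placeholders):

#   _destination_and_source(None, '192.0.2.1', 53, None, 5353)
#   # -> (dns.inet.AF_INET, ('192.0.2.1', 53), ('0.0.0.0', 5353))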
|
||||
|
||||
|
||||
def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
|
||||
ignore_unexpected=False, one_rr_per_rrset=False):
|
||||
"""Return the response obtained after sending a query via UDP.
|
||||
|
||||
@param q: the query
|
||||
@type q: dns.message.Message
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param timeout: The number of seconds to wait before the query times out.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@rtype: dns.message.Message object
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param ignore_unexpected: If True, ignore responses from unexpected
|
||||
sources. The default is False.
|
||||
@type ignore_unexpected: bool
|
||||
@param one_rr_per_rrset: Put each RR into its own RRset
|
||||
@type one_rr_per_rrset: bool
|
||||
"""
|
||||
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
s = socket.socket(af, socket.SOCK_DGRAM, 0)
|
||||
begin_time = None
|
||||
try:
|
||||
expiration = _compute_expiration(timeout)
|
||||
s.setblocking(0)
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
_wait_for_writable(s, expiration)
|
||||
begin_time = time.time()
|
||||
s.sendto(wire, destination)
|
||||
while 1:
|
||||
_wait_for_readable(s, expiration)
|
||||
(wire, from_address) = s.recvfrom(65535)
|
||||
if _addresses_equal(af, from_address, destination) or \
|
||||
(dns.inet.is_multicast(where) and
|
||||
from_address[1:] == destination[1:]):
|
||||
break
|
||||
if not ignore_unexpected:
|
||||
raise UnexpectedSource('got a response from '
|
||||
'%s instead of %s' % (from_address,
|
||||
destination))
|
||||
finally:
|
||||
if begin_time is None:
|
||||
response_time = 0
|
||||
else:
|
||||
response_time = time.time() - begin_time
|
||||
s.close()
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
one_rr_per_rrset=one_rr_per_rrset)
|
||||
r.time = response_time
|
||||
if not q.is_response(r):
|
||||
raise BadResponse
|
||||
return r
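A minimal usage sketch for the UDP path (the server address is a placeholder):

#   q = dns.message.make_query('www.example.com', dns.rdatatype.A)
#   r = dns.query.udp(q, '192.0.2.53', timeout=2)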
|
||||
|
||||
|
||||
def _net_read(sock, count, expiration):
|
||||
"""Read the specified number of bytes from sock. Keep trying until we
|
||||
either get the desired amount, or we hit EOF.
|
||||
A Timeout exception will be raised if the operation is not completed
|
||||
by the expiration time.
|
||||
"""
|
||||
s = b''
|
||||
while count > 0:
|
||||
_wait_for_readable(sock, expiration)
|
||||
n = sock.recv(count)
|
||||
if n == b'':
|
||||
raise EOFError
|
||||
count = count - len(n)
|
||||
s = s + n
|
||||
return s
|
||||
|
||||
|
||||
def _net_write(sock, data, expiration):
|
||||
"""Write the specified data to the socket.
|
||||
A Timeout exception will be raised if the operation is not completed
|
||||
by the expiration time.
|
||||
"""
|
||||
current = 0
|
||||
l = len(data)
|
||||
while current < l:
|
||||
_wait_for_writable(sock, expiration)
|
||||
current += sock.send(data[current:])
|
||||
|
||||
|
||||
def _connect(s, address):
|
||||
try:
|
||||
s.connect(address)
|
||||
except socket.error:
|
||||
(ty, v) = sys.exc_info()[:2]
|
||||
|
||||
if hasattr(v, 'errno'):
|
||||
v_err = v.errno
|
||||
else:
|
||||
v_err = v[0]
|
||||
if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
|
||||
raise v
|
||||
|
||||
|
||||
def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
|
||||
one_rr_per_rrset=False):
|
||||
"""Return the response obtained after sending a query via TCP.
|
||||
|
||||
@param q: the query
|
||||
@type q: dns.message.Message object
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param timeout: The number of seconds to wait before the query times out.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@rtype: dns.message.Message object
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param one_rr_per_rrset: Put each RR into its own RRset
|
||||
@type one_rr_per_rrset: bool
|
||||
"""
|
||||
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
s = socket.socket(af, socket.SOCK_STREAM, 0)
|
||||
begin_time = None
|
||||
try:
|
||||
expiration = _compute_expiration(timeout)
|
||||
s.setblocking(0)
|
||||
begin_time = time.time()
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
_connect(s, destination)
|
||||
|
||||
l = len(wire)
|
||||
|
||||
# copying the wire into tcpmsg is inefficient, but lets us
|
||||
# avoid writev() or doing a short write that would get pushed
|
||||
# onto the net
|
||||
tcpmsg = struct.pack("!H", l) + wire
|
||||
_net_write(s, tcpmsg, expiration)
|
||||
ldata = _net_read(s, 2, expiration)
|
||||
(l,) = struct.unpack("!H", ldata)
|
||||
wire = _net_read(s, l, expiration)
|
||||
finally:
|
||||
if begin_time is None:
|
||||
response_time = 0
|
||||
else:
|
||||
response_time = time.time() - begin_time
|
||||
s.close()
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
one_rr_per_rrset=one_rr_per_rrset)
|
||||
r.time = response_time
|
||||
if not q.is_response(r):
|
||||
raise BadResponse
|
||||
return r
|
||||
|
||||
|
||||
def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
|
||||
timeout=None, port=53, keyring=None, keyname=None, relativize=True,
|
||||
af=None, lifetime=None, source=None, source_port=0, serial=0,
|
||||
use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
|
||||
"""Return a generator for the responses to a zone transfer.
|
||||
|
||||
@param where: where to send the message
|
||||
@type where: string containing an IPv4 or IPv6 address
|
||||
@param zone: The name of the zone to transfer
|
||||
@type zone: dns.name.Name object or string
|
||||
@param rdtype: The type of zone transfer. The default is
|
||||
dns.rdatatype.AXFR.
|
||||
@type rdtype: int or string
|
||||
@param rdclass: The class of the zone transfer. The default is
|
||||
dns.rdataclass.IN.
|
||||
@type rdclass: int or string
|
||||
@param timeout: The number of seconds to wait for each response message.
|
||||
If None, the default, wait forever.
|
||||
@type timeout: float
|
||||
@param port: The port to which to send the message. The default is 53.
|
||||
@type port: int
|
||||
@param keyring: The TSIG keyring to use
|
||||
@type keyring: dict
|
||||
@param keyname: The name of the TSIG key to use
|
||||
@type keyname: dns.name.Name object or string
|
||||
@param relativize: If True, all names in the zone will be relativized to
|
||||
the zone origin. It is essential that the relativize setting matches
|
||||
the one specified to dns.zone.from_xfr().
|
||||
@type relativize: bool
|
||||
@param af: the address family to use. The default is None, which
|
||||
causes the address family to be inferred from the form of where.
|
||||
If the inference attempt fails, AF_INET is used.
|
||||
@type af: int
|
||||
@param lifetime: The total number of seconds to spend doing the transfer.
|
||||
If None, the default, then there is no limit on the time the transfer may
|
||||
take.
|
||||
@type lifetime: float
|
||||
@rtype: generator of dns.message.Message objects.
|
||||
@param source: source address. The default is the wildcard address.
|
||||
@type source: string
|
||||
@param source_port: The port from which to send the message.
|
||||
The default is 0.
|
||||
@type source_port: int
|
||||
@param serial: The SOA serial number to use as the base for an IXFR diff
|
||||
sequence (only meaningful if rdtype == dns.rdatatype.IXFR).
|
||||
@type serial: int
|
||||
@param use_udp: Use UDP (only meaningful for IXFR)
|
||||
@type use_udp: bool
|
||||
@param keyalgorithm: The TSIG algorithm to use; defaults to
|
||||
dns.tsig.default_algorithm
|
||||
@type keyalgorithm: string
|
||||
"""
|
||||
|
||||
if isinstance(zone, string_types):
|
||||
zone = dns.name.from_text(zone)
|
||||
if isinstance(rdtype, string_types):
|
||||
rdtype = dns.rdatatype.from_text(rdtype)
|
||||
q = dns.message.make_query(zone, rdtype, rdclass)
|
||||
if rdtype == dns.rdatatype.IXFR:
|
||||
rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA',
|
||||
'. . %u 0 0 0 0' % serial)
|
||||
q.authority.append(rrset)
|
||||
if keyring is not None:
|
||||
q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
|
||||
wire = q.to_wire()
|
||||
(af, destination, source) = _destination_and_source(af, where, port,
|
||||
source, source_port)
|
||||
if use_udp:
|
||||
if rdtype != dns.rdatatype.IXFR:
|
||||
raise ValueError('cannot do a UDP AXFR')
|
||||
s = socket.socket(af, socket.SOCK_DGRAM, 0)
|
||||
else:
|
||||
s = socket.socket(af, socket.SOCK_STREAM, 0)
|
||||
s.setblocking(0)
|
||||
if source is not None:
|
||||
s.bind(source)
|
||||
expiration = _compute_expiration(lifetime)
|
||||
_connect(s, destination)
|
||||
l = len(wire)
|
||||
if use_udp:
|
||||
_wait_for_writable(s, expiration)
|
||||
s.send(wire)
|
||||
else:
|
||||
tcpmsg = struct.pack("!H", l) + wire
|
||||
_net_write(s, tcpmsg, expiration)
|
||||
done = False
|
||||
delete_mode = True
|
||||
expecting_SOA = False
|
||||
soa_rrset = None
|
||||
if relativize:
|
||||
origin = zone
|
||||
oname = dns.name.empty
|
||||
else:
|
||||
origin = None
|
||||
oname = zone
|
||||
tsig_ctx = None
|
||||
first = True
|
||||
while not done:
|
||||
mexpiration = _compute_expiration(timeout)
|
||||
if mexpiration is None or mexpiration > expiration:
|
||||
mexpiration = expiration
|
||||
if use_udp:
|
||||
_wait_for_readable(s, expiration)
|
||||
(wire, from_address) = s.recvfrom(65535)
|
||||
else:
|
||||
ldata = _net_read(s, 2, mexpiration)
|
||||
(l,) = struct.unpack("!H", ldata)
|
||||
wire = _net_read(s, l, mexpiration)
|
||||
is_ixfr = (rdtype == dns.rdatatype.IXFR)
|
||||
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
|
||||
xfr=True, origin=origin, tsig_ctx=tsig_ctx,
|
||||
multi=True, first=first,
|
||||
one_rr_per_rrset=is_ixfr)
|
||||
tsig_ctx = r.tsig_ctx
|
||||
first = False
|
||||
answer_index = 0
|
||||
if soa_rrset is None:
|
||||
if not r.answer or r.answer[0].name != oname:
|
||||
raise dns.exception.FormError(
|
||||
"No answer or RRset not for qname")
|
||||
rrset = r.answer[0]
|
||||
if rrset.rdtype != dns.rdatatype.SOA:
|
||||
raise dns.exception.FormError("first RRset is not an SOA")
|
||||
answer_index = 1
|
||||
soa_rrset = rrset.copy()
|
||||
if rdtype == dns.rdatatype.IXFR:
|
||||
if soa_rrset[0].serial <= serial:
|
||||
#
|
||||
# We're already up-to-date.
|
||||
#
|
||||
done = True
|
||||
else:
|
||||
expecting_SOA = True
|
||||
#
|
||||
# Process SOAs in the answer section (other than the initial
|
||||
# SOA in the first message).
|
||||
#
|
||||
for rrset in r.answer[answer_index:]:
|
||||
if done:
|
||||
raise dns.exception.FormError("answers after final SOA")
|
||||
if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname:
|
||||
if expecting_SOA:
|
||||
if rrset[0].serial != serial:
|
||||
raise dns.exception.FormError(
|
||||
"IXFR base serial mismatch")
|
||||
expecting_SOA = False
|
||||
elif rdtype == dns.rdatatype.IXFR:
|
||||
delete_mode = not delete_mode
|
||||
#
|
||||
# If this SOA RRset is equal to the first we saw then we're
|
||||
# finished. If this is an IXFR we also check that we're seeing
|
||||
# the record in the expected part of the response.
|
||||
#
|
||||
if rrset == soa_rrset and \
|
||||
(rdtype == dns.rdatatype.AXFR or
|
||||
(rdtype == dns.rdatatype.IXFR and delete_mode)):
|
||||
done = True
|
||||
elif expecting_SOA:
|
||||
#
|
||||
# We made an IXFR request and are expecting another
|
||||
# SOA RR, but saw something else, so this must be an
|
||||
# AXFR response.
|
||||
#
|
||||
rdtype = dns.rdatatype.AXFR
|
||||
expecting_SOA = False
|
||||
if done and q.keyring and not r.had_tsig:
|
||||
raise dns.exception.FormError("missing TSIG")
|
||||
yield r
|
||||
s.close()
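A minimal usage sketch for the generator above (server and zone are placeholders); the messages are typically fed to dns.zone.from_xfr() as noted in the docstring:

#   z = dns.zone.from_xfr(dns.query.xfr('192.0.2.53', 'example.com'))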
|
125
lib/dns/rcode.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS Result Codes."""
|
||||
|
||||
import dns.exception
|
||||
from ._compat import long
|
||||
|
||||
|
||||
NOERROR = 0
|
||||
FORMERR = 1
|
||||
SERVFAIL = 2
|
||||
NXDOMAIN = 3
|
||||
NOTIMP = 4
|
||||
REFUSED = 5
|
||||
YXDOMAIN = 6
|
||||
YXRRSET = 7
|
||||
NXRRSET = 8
|
||||
NOTAUTH = 9
|
||||
NOTZONE = 10
|
||||
BADVERS = 16
|
||||
|
||||
_by_text = {
|
||||
'NOERROR': NOERROR,
|
||||
'FORMERR': FORMERR,
|
||||
'SERVFAIL': SERVFAIL,
|
||||
'NXDOMAIN': NXDOMAIN,
|
||||
'NOTIMP': NOTIMP,
|
||||
'REFUSED': REFUSED,
|
||||
'YXDOMAIN': YXDOMAIN,
|
||||
'YXRRSET': YXRRSET,
|
||||
'NXRRSET': NXRRSET,
|
||||
'NOTAUTH': NOTAUTH,
|
||||
'NOTZONE': NOTZONE,
|
||||
'BADVERS': BADVERS
|
||||
}
|
||||
|
||||
# We construct the inverse mapping programmatically to ensure that we
|
||||
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
|
||||
# would cause the mapping not to be a true inverse.
|
||||
|
||||
_by_value = dict((y, x) for x, y in _by_text.items())
|
||||
|
||||
|
||||
class UnknownRcode(dns.exception.DNSException):
|
||||
|
||||
"""A DNS rcode is unknown."""
|
||||
|
||||
|
||||
def from_text(text):
|
||||
"""Convert text into an rcode.
|
||||
|
||||
@param text: the textual rcode
|
||||
@type text: string
|
||||
@raises UnknownRcode: the rcode is unknown
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
if text.isdigit():
|
||||
v = int(text)
|
||||
if v >= 0 and v <= 4095:
|
||||
return v
|
||||
v = _by_text.get(text.upper())
|
||||
if v is None:
|
||||
raise UnknownRcode
|
||||
return v
|
||||
|
||||
|
||||
def from_flags(flags, ednsflags):
|
||||
"""Return the rcode value encoded by flags and ednsflags.
|
||||
|
||||
@param flags: the DNS flags
|
||||
@type flags: int
|
||||
@param ednsflags: the EDNS flags
|
||||
@type ednsflags: int
|
||||
@raises ValueError: rcode is < 0 or > 4095
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0)
|
||||
if value < 0 or value > 4095:
|
||||
raise ValueError('rcode must be >= 0 and <= 4095')
|
||||
return value
|
||||
|
||||
|
||||
def to_flags(value):
|
||||
"""Return a (flags, ednsflags) tuple which encodes the rcode.
|
||||
|
||||
@param value: the rcode
|
||||
@type value: int
|
||||
@raises ValueError: rcode is < 0 or > 4095
|
||||
@rtype: (int, int) tuple
|
||||
"""
|
||||
|
||||
if value < 0 or value > 4095:
|
||||
raise ValueError('rcode must be >= 0 and <= 4095')
|
||||
v = value & 0xf
|
||||
ev = long(value & 0xff0) << 20
|
||||
return (v, ev)
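A worked example of the split encoding above: BADVERS (16) does not fit in the 4-bit header field, so its high bits move into the EDNS flags.

#   to_flags(BADVERS)        == (0, 0x1000000)   # (16 & 0xf, (16 & 0xff0) << 20)
#   from_flags(0, 0x1000000) == BADVERS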
|
||||
|
||||
|
||||
def to_text(value):
|
||||
"""Convert rcode into text.
|
||||
|
||||
@param value: the rcode
|
||||
@type value: int
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
text = _by_value.get(value)
|
||||
if text is None:
|
||||
text = str(value)
|
||||
return text
|
464
lib/dns/rdata.py
Normal file
@@ -0,0 +1,464 @@
|
||||
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
|
||||
#
|
||||
# Permission to use, copy, modify, and distribute this software and its
|
||||
# documentation for any purpose with or without fee is hereby granted,
|
||||
# provided that the above copyright notice and this permission notice
|
||||
# appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
"""DNS rdata.
|
||||
|
||||
@var _rdata_modules: A dictionary mapping a (rdclass, rdtype) tuple to
|
||||
the module which implements that type.
|
||||
@type _rdata_modules: dict
|
||||
@var _module_prefix: The prefix to use when forming modules names. The
|
||||
default is 'dns.rdtypes'. Changing this value will break the library.
|
||||
@type _module_prefix: string
|
||||
@var _hex_chunksize: At most this many octets will be represented in each
|
||||
chunk of hexstring that _hexify() produces before whitespace occurs.
|
||||
@type _hex_chunk: int"""
|
||||
|
||||
from io import BytesIO
|
||||
import base64
|
||||
import binascii
|
||||
import struct
|
||||
|
||||
import dns.exception
|
||||
import dns.name
|
||||
import dns.rdataclass
|
||||
import dns.rdatatype
|
||||
import dns.tokenizer
|
||||
import dns.wiredata
|
||||
from ._compat import xrange, string_types, text_type
|
||||
|
||||
_hex_chunksize = 32
|
||||
|
||||
|
||||
def _hexify(data, chunksize=_hex_chunksize):
|
||||
"""Convert a binary string into its hex encoding, broken up into chunks
|
||||
of I{chunksize} characters separated by a space.
|
||||
|
||||
@param data: the binary string
|
||||
@type data: string
|
||||
@param chunksize: the chunk size. Default is L{dns.rdata._hex_chunksize}
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
line = binascii.hexlify(data)
|
||||
return b' '.join([line[i:i + chunksize]
|
||||
for i
|
||||
in range(0, len(line), chunksize)]).decode()
|
||||
|
||||
_base64_chunksize = 32
|
||||
|
||||
|
||||
def _base64ify(data, chunksize=_base64_chunksize):
|
||||
"""Convert a binary string into its base64 encoding, broken up into chunks
|
||||
of I{chunksize} characters separated by a space.
|
||||
|
||||
@param data: the binary string
|
||||
@type data: string
|
||||
@param chunksize: the chunk size. Default is
|
||||
L{dns.rdata._base64_chunksize}
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
line = base64.b64encode(data)
|
||||
return b' '.join([line[i:i + chunksize]
|
||||
for i
|
||||
in range(0, len(line), chunksize)]).decode()
|
||||
|
||||
__escaped = {
|
||||
'"': True,
|
||||
'\\': True,
|
||||
}
|
||||
|
||||
|
||||
def _escapify(qstring):
|
||||
"""Escape the characters in a quoted string which need it.
|
||||
|
||||
@param qstring: the string
|
||||
@type qstring: string
|
||||
@returns: the escaped string
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
if isinstance(qstring, text_type):
|
||||
qstring = qstring.encode()
|
||||
if not isinstance(qstring, bytearray):
|
||||
qstring = bytearray(qstring)
|
||||
|
||||
text = ''
|
||||
for c in qstring:
|
||||
packed = struct.pack('!B', c).decode()
|
||||
if packed in __escaped:
|
||||
text += '\\' + packed
|
||||
elif c >= 0x20 and c < 0x7F:
|
||||
text += packed
|
||||
else:
|
||||
text += '\\%03d' % c
|
||||
return text
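An illustrative example of _escapify(): printable ASCII passes through, '"' and '\' gain a leading backslash, and any other octet becomes a three-digit \DDD escape.

#   _escapify(b'ab\x07')   # -> the text ab\007 (the 0x07 octet becomes \007)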
|
||||
|
||||
|
||||
def _truncate_bitmap(what):
|
||||
"""Determine the index of greatest byte that isn't all zeros, and
|
||||
return the bitmap truncated after that byte (all bytes up to and including that index).
|
||||
|
||||
@param what: a string of octets representing a bitmap.
|
||||
@type what: string
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
for i in xrange(len(what) - 1, -1, -1):
|
||||
if what[i] != 0:
|
||||
break
|
||||
return what[0: i + 1]
|
||||
|
||||
|
||||
class Rdata(object):
|
||||
|
||||
"""Base class for all DNS rdata types.
|
||||
"""
|
||||
|
||||
__slots__ = ['rdclass', 'rdtype']
|
||||
|
||||
def __init__(self, rdclass, rdtype):
|
||||
"""Initialize an rdata.
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
"""
|
||||
|
||||
self.rdclass = rdclass
|
||||
self.rdtype = rdtype
|
||||
|
||||
def covers(self):
|
||||
"""DNS SIG/RRSIG rdatas apply to a specific type; this type is
|
||||
returned by the covers() function. If the rdata type is not
|
||||
SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
|
||||
creating rdatasets, allowing the rdataset to contain only RRSIGs
|
||||
of a particular type, e.g. RRSIG(NS).
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return dns.rdatatype.NONE
|
||||
|
||||
def extended_rdatatype(self):
|
||||
"""Return a 32-bit type value, the least significant 16 bits of
|
||||
which are the ordinary DNS type, and the upper 16 bits of which are
|
||||
the "covered" type, if any.
|
||||
@rtype: int
|
||||
"""
|
||||
|
||||
return self.covers() << 16 | self.rdtype
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
"""Convert an rdata to text format.
|
||||
@rtype: string
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
"""Convert an rdata to wire format.
|
||||
@rtype: string
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def to_digestable(self, origin=None):
|
||||
"""Convert rdata to a format suitable for digesting in hashes. This
|
||||
is also the DNSSEC canonical form."""
|
||||
f = BytesIO()
|
||||
self.to_wire(f, None, origin)
|
||||
return f.getvalue()
|
||||
|
||||
def validate(self):
|
||||
"""Check that the current contents of the rdata's fields are
|
||||
valid. If you change an rdata by assigning to its fields,
|
||||
it is a good idea to call validate() when you are done making
|
||||
changes.
|
||||
"""
|
||||
dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
|
||||
|
||||
def __repr__(self):
|
||||
covers = self.covers()
|
||||
if covers == dns.rdatatype.NONE:
|
||||
ctext = ''
|
||||
else:
|
||||
ctext = '(' + dns.rdatatype.to_text(covers) + ')'
|
||||
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
|
||||
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
|
||||
str(self) + '>'
|
||||
|
||||
def __str__(self):
|
||||
return self.to_text()
|
||||
|
||||
def _cmp(self, other):
|
||||
"""Compare an rdata with another rdata of the same rdtype and
|
||||
rdclass. Return < 0 if self < other in the DNSSEC ordering,
|
||||
0 if self == other, and > 0 if self > other.
|
||||
"""
|
||||
our = self.to_digestable(dns.name.root)
|
||||
their = other.to_digestable(dns.name.root)
|
||||
if our == their:
|
||||
return 0
|
||||
if our > their:
|
||||
return 1
|
||||
|
||||
return -1
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Rdata):
|
||||
return False
|
||||
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return False
|
||||
return self._cmp(other) == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
if not isinstance(other, Rdata):
|
||||
return True
|
||||
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return True
|
||||
return self._cmp(other) != 0
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
|
||||
return NotImplemented
|
||||
return self._cmp(other) < 0
|
||||
|
||||
def __le__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) <= 0
|
||||
|
||||
def __ge__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) >= 0
|
||||
|
||||
def __gt__(self, other):
|
||||
if not isinstance(other, Rdata) or \
|
||||
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
|
||||
return NotImplemented
|
||||
return self._cmp(other) > 0
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.to_digestable(dns.name.root))
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
"""Build an rdata object from text format.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param tok: The tokenizer
|
||||
@type tok: dns.tokenizer.Tokenizer
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@param relativize: should names be relativized?
|
||||
@type relativize: bool
|
||||
@rtype: dns.rdata.Rdata instance
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
"""Build an rdata object from wire format
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param rdlen: The length of the wire-format rdata
|
||||
@type rdlen: int
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@rtype: dns.rdata.Rdata instance
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def choose_relativity(self, origin=None, relativize=True):
|
||||
"""Convert any domain names in the rdata to the specified
|
||||
relativization.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class GenericRdata(Rdata):
|
||||
|
||||
"""Generate Rdata Class
|
||||
|
||||
This class is used for rdata types for which we have no better
|
||||
implementation. It implements the DNS "unknown RRs" scheme.
|
||||
"""
|
||||
|
||||
__slots__ = ['data']
|
||||
|
||||
def __init__(self, rdclass, rdtype, data):
|
||||
super(GenericRdata, self).__init__(rdclass, rdtype)
|
||||
self.data = data
|
||||
|
||||
def to_text(self, origin=None, relativize=True, **kw):
|
||||
return r'\# %d ' % len(self.data) + _hexify(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
token = tok.get()
|
||||
if not token.is_identifier() or token.value != '\#':
|
||||
raise dns.exception.SyntaxError(
|
||||
r'generic rdata does not start with \#')
|
||||
length = tok.get_int()
|
||||
chunks = []
|
||||
while 1:
|
||||
token = tok.get()
|
||||
if token.is_eol_or_eof():
|
||||
break
|
||||
chunks.append(token.value.encode())
|
||||
hex = b''.join(chunks)
|
||||
data = binascii.unhexlify(hex)
|
||||
if len(data) != length:
|
||||
raise dns.exception.SyntaxError(
|
||||
'generic rdata hex data has wrong length')
|
||||
return cls(rdclass, rdtype, data)
|
||||
|
||||
def to_wire(self, file, compress=None, origin=None):
|
||||
file.write(self.data)
|
||||
|
||||
@classmethod
|
||||
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
return cls(rdclass, rdtype, wire[current: current + rdlen])
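An illustrative example of the generic "unknown RR" text syntax (RFC 3597) handled by this class: an A record for 127.0.0.1 may be written as

#   \# 4 7f000001
#
# i.e. a literal \#, the rdata length in octets, then the rdata as hex.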
|
||||
|
||||
_rdata_modules = {}
|
||||
_module_prefix = 'dns.rdtypes'
|
||||
|
||||
|
||||
def get_rdata_class(rdclass, rdtype):
|
||||
|
||||
def import_module(name):
|
||||
mod = __import__(name)
|
||||
components = name.split('.')
|
||||
for comp in components[1:]:
|
||||
mod = getattr(mod, comp)
|
||||
return mod
|
||||
|
||||
mod = _rdata_modules.get((rdclass, rdtype))
|
||||
rdclass_text = dns.rdataclass.to_text(rdclass)
|
||||
rdtype_text = dns.rdatatype.to_text(rdtype)
|
||||
rdtype_text = rdtype_text.replace('-', '_')
|
||||
if not mod:
|
||||
mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype))
|
||||
if not mod:
|
||||
try:
|
||||
mod = import_module('.'.join([_module_prefix,
|
||||
rdclass_text, rdtype_text]))
|
||||
_rdata_modules[(rdclass, rdtype)] = mod
|
||||
except ImportError:
|
||||
try:
|
||||
mod = import_module('.'.join([_module_prefix,
|
||||
'ANY', rdtype_text]))
|
||||
_rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod
|
||||
except ImportError:
|
||||
mod = None
|
||||
if mod:
|
||||
cls = getattr(mod, rdtype_text)
|
||||
else:
|
||||
cls = GenericRdata
|
||||
return cls
|
||||
|
||||
|
||||
def from_text(rdclass, rdtype, tok, origin=None, relativize=True):
|
||||
"""Build an rdata object from text format.
|
||||
|
||||
This function attempts to dynamically load a class which
|
||||
implements the specified rdata class and type. If there is no
|
||||
class-and-type-specific implementation, the GenericRdata class
|
||||
is used.
|
||||
|
||||
Once a class is chosen, its from_text() class method is called
|
||||
with the parameters to this function.
|
||||
|
||||
If I{tok} is a string, then a tokenizer is created and the string
|
||||
is used as its input.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param tok: The tokenizer or input text
|
||||
@type tok: dns.tokenizer.Tokenizer or string
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@param relativize: Should names be relativized?
|
||||
@type relativize: bool
|
||||
@rtype: dns.rdata.Rdata instance"""
|
||||
|
||||
if isinstance(tok, string_types):
|
||||
tok = dns.tokenizer.Tokenizer(tok)
|
||||
cls = get_rdata_class(rdclass, rdtype)
|
||||
if cls != GenericRdata:
|
||||
# peek at first token
|
||||
token = tok.get()
|
||||
tok.unget(token)
|
||||
if token.is_identifier() and \
|
||||
token.value == r'\#':
|
||||
#
|
||||
# Known type using the generic syntax. Extract the
|
||||
# wire form from the generic syntax, and then run
|
||||
# from_wire on it.
|
||||
#
|
||||
rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin,
|
||||
relativize)
|
||||
return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data),
|
||||
origin)
|
||||
return cls.from_text(rdclass, rdtype, tok, origin, relativize)
|
||||
|
||||
|
||||
def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None):
|
||||
"""Build an rdata object from wire format
|
||||
|
||||
This function attempts to dynamically load a class which
|
||||
implements the specified rdata class and type. If there is no
|
||||
class-and-type-specific implementation, the GenericRdata class
|
||||
is used.
|
||||
|
||||
Once a class is chosen, its from_wire() class method is called
|
||||
with the parameters to this function.
|
||||
|
||||
@param rdclass: The rdata class
|
||||
@type rdclass: int
|
||||
@param rdtype: The rdata type
|
||||
@type rdtype: int
|
||||
@param wire: The wire-format message
|
||||
@type wire: string
|
||||
@param current: The offset in wire of the beginning of the rdata.
|
||||
@type current: int
|
||||
@param rdlen: The length of the wire-format rdata
|
||||
@type rdlen: int
|
||||
@param origin: The origin to use for relative names
|
||||
@type origin: dns.name.Name
|
||||
@rtype: dns.rdata.Rdata instance"""
|
||||
|
||||
wire = dns.wiredata.maybe_wrap(wire)
|
||||
cls = get_rdata_class(rdclass, rdtype)
|
||||
return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin)
|
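For reference, a minimal usage sketch of the loader above. It assumes the type-specific implementations elsewhere under lib/dns/rdtypes (e.g. the IN/A module) are importable alongside this file; the example code itself is not part of the change set.

# Illustrative only: exercises dns.rdata.from_text()/to_text() round-tripping.
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# A known type resolves to a concrete implementation via get_rdata_class()...
a_rdata = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '10.0.0.1')

# ...while an unregistered type falls back to GenericRdata and the
# RFC 3597 "unknown RR" \# syntax.
generic = dns.rdata.from_text(dns.rdataclass.IN, 65280, r'\# 2 abcd')

print(a_rdata.to_text())   # 10.0.0.1
print(generic.to_text())   # \# 2 abcd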
118
lib/dns/rdataclass.py
Normal file
@@ -0,0 +1,118 @@
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""DNS Rdata Classes.

@var _by_text: The rdata class textual name to value mapping
@type _by_text: dict
@var _by_value: The rdata class value to textual name mapping
@type _by_value: dict
@var _metaclasses: If an rdataclass is a metaclass, there will be a mapping
whose key is the rdatatype value and whose value is True in this dictionary.
@type _metaclasses: dict"""

import re

import dns.exception

RESERVED0 = 0
IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255

_by_text = {
    'RESERVED0': RESERVED0,
    'IN': IN,
    'CH': CH,
    'HS': HS,
    'NONE': NONE,
    'ANY': ANY
}

# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.

_by_value = dict((y, x) for x, y in _by_text.items())

# Now that we've built the inverse map, we can add class aliases to
# the _by_text mapping.

_by_text.update({
    'INTERNET': IN,
    'CHAOS': CH,
    'HESIOD': HS
})

_metaclasses = {
    NONE: True,
    ANY: True
}

_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I)


class UnknownRdataclass(dns.exception.DNSException):

    """A DNS class is unknown."""


def from_text(text):
    """Convert text into a DNS rdata class value.
    @param text: the text
    @type text: string
    @rtype: int
    @raises dns.rdataclass.UnknownRdataclass: the class is unknown
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """

    value = _by_text.get(text.upper())
    if value is None:
        match = _unknown_class_pattern.match(text)
        if match is None:
            raise UnknownRdataclass
        value = int(match.group(1))
        if value < 0 or value > 65535:
            raise ValueError("class must be between >= 0 and <= 65535")
    return value


def to_text(value):
    """Convert a DNS rdata class to text.
    @param value: the rdata class value
    @type value: int
    @rtype: string
    @raises ValueError: the rdata class value is not >= 0 and <= 65535
    """

    if value < 0 or value > 65535:
        raise ValueError("class must be between >= 0 and <= 65535")
    text = _by_value.get(value)
    if text is None:
        text = 'CLASS' + repr(value)
    return text


def is_metaclass(rdclass):
    """True if the class is a metaclass.
    @param rdclass: the rdata class
    @type rdclass: int
    @rtype: bool"""

    if rdclass in _metaclasses:
        return True
    return False
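A quick sketch of how the two mappings above behave, including the CLASS### escape for values with no mnemonic (illustrative, not part of the change):

import dns.rdataclass

dns.rdataclass.from_text('IN')        # 1
dns.rdataclass.from_text('chaos')     # 3, lookup is case-insensitive via upper()
dns.rdataclass.from_text('CLASS42')   # 42, via _unknown_class_pattern
dns.rdataclass.to_text(42)            # 'CLASS42', no entry in _by_value
dns.rdataclass.is_metaclass(dns.rdataclass.ANY)   # True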
338
lib/dns/rdataset.py
Normal file
@@ -0,0 +1,338 @@
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)"""

import random
from io import StringIO
import struct

import dns.exception
import dns.rdatatype
import dns.rdataclass
import dns.rdata
import dns.set
from ._compat import string_types

# define SimpleSet here for backwards compatibility
SimpleSet = dns.set.Set


class DifferingCovers(dns.exception.DNSException):

    """An attempt was made to add a DNS SIG/RRSIG whose covered type
    is not the same as that of the other rdatas in the rdataset."""


class IncompatibleTypes(dns.exception.DNSException):

    """An attempt was made to add DNS RR data of an incompatible type."""


class Rdataset(dns.set.Set):

    """A DNS rdataset.

    @ivar rdclass: The class of the rdataset
    @type rdclass: int
    @ivar rdtype: The type of the rdataset
    @type rdtype: int
    @ivar covers: The covered type. Usually this value is
    dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
    dns.rdatatype.RRSIG, then the covers value will be the rdata
    type the SIG/RRSIG covers. The library treats the SIG and RRSIG
    types as if they were a family of
    types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much
    easier to work with than if RRSIGs covering different rdata
    types were aggregated into a single RRSIG rdataset.
    @type covers: int
    @ivar ttl: The DNS TTL (Time To Live) value
    @type ttl: int
    """

    __slots__ = ['rdclass', 'rdtype', 'covers', 'ttl']

    def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
        """Create a new rdataset of the specified class and type.

        @see: the description of the class instance variables for the
        meaning of I{rdclass} and I{rdtype}"""

        super(Rdataset, self).__init__()
        self.rdclass = rdclass
        self.rdtype = rdtype
        self.covers = covers
        self.ttl = 0

    def _clone(self):
        obj = super(Rdataset, self)._clone()
        obj.rdclass = self.rdclass
        obj.rdtype = self.rdtype
        obj.covers = self.covers
        obj.ttl = self.ttl
        return obj

    def update_ttl(self, ttl):
        """Set the TTL of the rdataset to be the lesser of the set's current
        TTL or the specified TTL. If the set contains no rdatas, set the TTL
        to the specified TTL.
        @param ttl: The TTL
        @type ttl: int"""

        if len(self) == 0:
            self.ttl = ttl
        elif ttl < self.ttl:
            self.ttl = ttl

    def add(self, rd, ttl=None):
        """Add the specified rdata to the rdataset.

        If the optional I{ttl} parameter is supplied, then
        self.update_ttl(ttl) will be called prior to adding the rdata.

        @param rd: The rdata
        @type rd: dns.rdata.Rdata object
        @param ttl: The TTL
        @type ttl: int"""

        #
        # If we're adding a signature, do some special handling to
        # check that the signature covers the same type as the
        # other rdatas in this rdataset. If this is the first rdata
        # in the set, initialize the covers field.
        #
        if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
            raise IncompatibleTypes
        if ttl is not None:
            self.update_ttl(ttl)
        if self.rdtype == dns.rdatatype.RRSIG or \
           self.rdtype == dns.rdatatype.SIG:
            covers = rd.covers()
            if len(self) == 0 and self.covers == dns.rdatatype.NONE:
                self.covers = covers
            elif self.covers != covers:
                raise DifferingCovers
        if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
            self.clear()
        super(Rdataset, self).add(rd)

    def union_update(self, other):
        self.update_ttl(other.ttl)
        super(Rdataset, self).union_update(other)

    def intersection_update(self, other):
        self.update_ttl(other.ttl)
        super(Rdataset, self).intersection_update(other)

    def update(self, other):
        """Add all rdatas in other to self.

        @param other: The rdataset from which to update
        @type other: dns.rdataset.Rdataset object"""

        self.update_ttl(other.ttl)
        super(Rdataset, self).update(other)

    def __repr__(self):
        if self.covers == 0:
            ctext = ''
        else:
            ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
        return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
               dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset>'

    def __str__(self):
        return self.to_text()

    def __eq__(self, other):
        """Two rdatasets are equal if they have the same class, type, and
        covers, and contain the same rdata.
        @rtype: bool"""

        if not isinstance(other, Rdataset):
            return False
        if self.rdclass != other.rdclass or \
           self.rdtype != other.rdtype or \
           self.covers != other.covers:
            return False
        return super(Rdataset, self).__eq__(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def to_text(self, name=None, origin=None, relativize=True,
                override_rdclass=None, **kw):
        """Convert the rdataset into DNS master file format.

        @see: L{dns.name.Name.choose_relativity} for more information
        on how I{origin} and I{relativize} determine the way names
        are emitted.

        Any additional keyword arguments are passed on to the rdata
        to_text() method.

        @param name: If name is not None, emit a RRs with I{name} as
        the owner name.
        @type name: dns.name.Name object
        @param origin: The origin for relative names, or None.
        @type origin: dns.name.Name object
        @param relativize: True if names should names be relativized
        @type relativize: bool"""
        if name is not None:
            name = name.choose_relativity(origin, relativize)
            ntext = str(name)
            pad = ' '
        else:
            ntext = ''
            pad = ''
        s = StringIO()
        if override_rdclass is not None:
            rdclass = override_rdclass
        else:
            rdclass = self.rdclass
        if len(self) == 0:
            #
            # Empty rdatasets are used for the question section, and in
            # some dynamic updates, so we don't need to print out the TTL
            # (which is meaningless anyway).
            #
            s.write(u'%s%s%s %s\n' % (ntext, pad,
                                      dns.rdataclass.to_text(rdclass),
                                      dns.rdatatype.to_text(self.rdtype)))
        else:
            for rd in self:
                s.write(u'%s%s%d %s %s %s\n' %
                        (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass),
                         dns.rdatatype.to_text(self.rdtype),
                         rd.to_text(origin=origin, relativize=relativize,
                                    **kw)))
        #
        # We strip off the final \n for the caller's convenience in printing
        #
        return s.getvalue()[:-1]

    def to_wire(self, name, file, compress=None, origin=None,
                override_rdclass=None, want_shuffle=True):
        """Convert the rdataset to wire format.

        @param name: The owner name of the RRset that will be emitted
        @type name: dns.name.Name object
        @param file: The file to which the wire format data will be appended
        @type file: file
        @param compress: The compression table to use; the default is None.
        @type compress: dict
        @param origin: The origin to be appended to any relative names when
        they are emitted. The default is None.
        @returns: the number of records emitted
        @rtype: int
        """

        if override_rdclass is not None:
            rdclass = override_rdclass
            want_shuffle = False
        else:
            rdclass = self.rdclass
        file.seek(0, 2)
        if len(self) == 0:
            name.to_wire(file, compress, origin)
            stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
            file.write(stuff)
            return 1
        else:
            if want_shuffle:
                l = list(self)
                random.shuffle(l)
            else:
                l = self
            for rd in l:
                name.to_wire(file, compress, origin)
                stuff = struct.pack("!HHIH", self.rdtype, rdclass,
                                    self.ttl, 0)
                file.write(stuff)
                start = file.tell()
                rd.to_wire(file, compress, origin)
                end = file.tell()
                assert end - start < 65536
                file.seek(start - 2)
                stuff = struct.pack("!H", end - start)
                file.write(stuff)
                file.seek(0, 2)
            return len(self)

    def match(self, rdclass, rdtype, covers):
        """Returns True if this rdataset matches the specified class, type,
        and covers"""
        if self.rdclass == rdclass and \
           self.rdtype == rdtype and \
           self.covers == covers:
            return True
        return False


def from_text_list(rdclass, rdtype, ttl, text_rdatas):
    """Create an rdataset with the specified class, type, and TTL, and with
    the specified list of rdatas in text format.

    @rtype: dns.rdataset.Rdataset object
    """

    if isinstance(rdclass, string_types):
        rdclass = dns.rdataclass.from_text(rdclass)
    if isinstance(rdtype, string_types):
        rdtype = dns.rdatatype.from_text(rdtype)
    r = Rdataset(rdclass, rdtype)
    r.update_ttl(ttl)
    for t in text_rdatas:
        rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
        r.add(rd)
    return r


def from_text(rdclass, rdtype, ttl, *text_rdatas):
    """Create an rdataset with the specified class, type, and TTL, and with
    the specified rdatas in text format.

    @rtype: dns.rdataset.Rdataset object
    """

    return from_text_list(rdclass, rdtype, ttl, text_rdatas)


def from_rdata_list(ttl, rdatas):
    """Create an rdataset with the specified TTL, and with
    the specified list of rdata objects.

    @rtype: dns.rdataset.Rdataset object
    """

    if len(rdatas) == 0:
        raise ValueError("rdata list must not be empty")
    r = None
    for rd in rdatas:
        if r is None:
            r = Rdataset(rd.rdclass, rd.rdtype)
            r.update_ttl(ttl)
        r.add(rd)
    return r


def from_rdata(ttl, *rdatas):
    """Create an rdataset with the specified TTL, and with
    the specified rdata objects.

    @rtype: dns.rdataset.Rdataset object
    """

    return from_rdata_list(ttl, rdatas)
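For context, a small sketch of building a set with the helpers above (it assumes the A rdata implementation elsewhere in lib/dns/rdtypes is available; not part of the change):

import dns.rdata
import dns.rdataset

rrs = dns.rdataset.from_text('IN', 'A', 300, '10.0.0.1', '10.0.0.2')
print(rrs.to_text())
# 300 IN A 10.0.0.1
# 300 IN A 10.0.0.2

# update_ttl() keeps the smallest TTL seen, so adding a record at a
# lower TTL lowers the TTL of the whole set.
rrs.add(dns.rdata.from_text(rrs.rdclass, rrs.rdtype, '10.0.0.3'), ttl=60)
print(rrs.ttl)   # 60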
253
lib/dns/rdatatype.py
Normal file
@@ -0,0 +1,253 @@
# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""DNS Rdata Types.

@var _by_text: The rdata type textual name to value mapping
@type _by_text: dict
@var _by_value: The rdata type value to textual name mapping
@type _by_value: dict
@var _metatypes: If an rdatatype is a metatype, there will be a mapping
whose key is the rdatatype value and whose value is True in this dictionary.
@type _metatypes: dict
@var _singletons: If an rdatatype is a singleton, there will be a mapping
whose key is the rdatatype value and whose value is True in this dictionary.
@type _singletons: dict"""

import re

import dns.exception

NONE = 0
A = 1
NS = 2
MD = 3
MF = 4
CNAME = 5
SOA = 6
MB = 7
MG = 8
MR = 9
NULL = 10
WKS = 11
PTR = 12
HINFO = 13
MINFO = 14
MX = 15
TXT = 16
RP = 17
AFSDB = 18
X25 = 19
ISDN = 20
RT = 21
NSAP = 22
NSAP_PTR = 23
SIG = 24
KEY = 25
PX = 26
GPOS = 27
AAAA = 28
LOC = 29
NXT = 30
SRV = 33
NAPTR = 35
KX = 36
CERT = 37
A6 = 38
DNAME = 39
OPT = 41
APL = 42
DS = 43
SSHFP = 44
IPSECKEY = 45
RRSIG = 46
NSEC = 47
DNSKEY = 48
DHCID = 49
NSEC3 = 50
NSEC3PARAM = 51
TLSA = 52
HIP = 55
CDS = 59
CDNSKEY = 60
CSYNC = 62
SPF = 99
UNSPEC = 103
EUI48 = 108
EUI64 = 109
TKEY = 249
TSIG = 250
IXFR = 251
AXFR = 252
MAILB = 253
MAILA = 254
ANY = 255
URI = 256
CAA = 257
TA = 32768
DLV = 32769

_by_text = {
    'NONE': NONE,
    'A': A,
    'NS': NS,
    'MD': MD,
    'MF': MF,
    'CNAME': CNAME,
    'SOA': SOA,
    'MB': MB,
    'MG': MG,
    'MR': MR,
    'NULL': NULL,
    'WKS': WKS,
    'PTR': PTR,
    'HINFO': HINFO,
    'MINFO': MINFO,
    'MX': MX,
    'TXT': TXT,
    'RP': RP,
    'AFSDB': AFSDB,
    'X25': X25,
    'ISDN': ISDN,
    'RT': RT,
    'NSAP': NSAP,
    'NSAP-PTR': NSAP_PTR,
    'SIG': SIG,
    'KEY': KEY,
    'PX': PX,
    'GPOS': GPOS,
    'AAAA': AAAA,
    'LOC': LOC,
    'NXT': NXT,
    'SRV': SRV,
    'NAPTR': NAPTR,
    'KX': KX,
    'CERT': CERT,
    'A6': A6,
    'DNAME': DNAME,
    'OPT': OPT,
    'APL': APL,
    'DS': DS,
    'SSHFP': SSHFP,
    'IPSECKEY': IPSECKEY,
    'RRSIG': RRSIG,
    'NSEC': NSEC,
    'DNSKEY': DNSKEY,
    'DHCID': DHCID,
    'NSEC3': NSEC3,
    'NSEC3PARAM': NSEC3PARAM,
    'TLSA': TLSA,
    'HIP': HIP,
    'CDS': CDS,
    'CDNSKEY': CDNSKEY,
    'CSYNC': CSYNC,
    'SPF': SPF,
    'UNSPEC': UNSPEC,
    'EUI48': EUI48,
    'EUI64': EUI64,
    'TKEY': TKEY,
    'TSIG': TSIG,
    'IXFR': IXFR,
    'AXFR': AXFR,
    'MAILB': MAILB,
    'MAILA': MAILA,
    'ANY': ANY,
    'URI': URI,
    'CAA': CAA,
    'TA': TA,
    'DLV': DLV,
}

# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be true inverse.

_by_value = dict((y, x) for x, y in _by_text.items())


_metatypes = {
    OPT: True
}

_singletons = {
    SOA: True,
    NXT: True,
    DNAME: True,
    NSEC: True,
    # CNAME is technically a singleton, but we allow multiple CNAMEs.
}

_unknown_type_pattern = re.compile('TYPE([0-9]+)$', re.I)


class UnknownRdatatype(dns.exception.DNSException):

    """DNS resource record type is unknown."""


def from_text(text):
    """Convert text into a DNS rdata type value.
    @param text: the text
    @type text: string
    @raises dns.rdatatype.UnknownRdatatype: the type is unknown
    @raises ValueError: the rdata type value is not >= 0 and <= 65535
    @rtype: int"""

    value = _by_text.get(text.upper())
    if value is None:
        match = _unknown_type_pattern.match(text)
        if match is None:
            raise UnknownRdatatype
        value = int(match.group(1))
        if value < 0 or value > 65535:
            raise ValueError("type must be between >= 0 and <= 65535")
    return value


def to_text(value):
    """Convert a DNS rdata type to text.
    @param value: the rdata type value
    @type value: int
    @raises ValueError: the rdata type value is not >= 0 and <= 65535
    @rtype: string"""

    if value < 0 or value > 65535:
        raise ValueError("type must be between >= 0 and <= 65535")
    text = _by_value.get(value)
    if text is None:
        text = 'TYPE' + repr(value)
    return text


def is_metatype(rdtype):
    """True if the type is a metatype.
    @param rdtype: the type
    @type rdtype: int
    @rtype: bool"""

    if rdtype >= TKEY and rdtype <= ANY or rdtype in _metatypes:
        return True
    return False


def is_singleton(rdtype):
    """True if the type is a singleton.
    @param rdtype: the type
    @type rdtype: int
    @rtype: bool"""

    if rdtype in _singletons:
        return True
    return False
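Sketch of the TYPE### escape and the metatype/singleton helpers defined above (illustrative, not part of the change):

import dns.rdatatype

dns.rdatatype.from_text('aaaa')        # 28
dns.rdatatype.from_text('TYPE65280')   # 65280, matched by _unknown_type_pattern
dns.rdatatype.to_text(65280)           # 'TYPE65280'
dns.rdatatype.is_metatype(dns.rdatatype.OPT)    # True, via _metatypes
dns.rdatatype.is_metatype(dns.rdatatype.AXFR)   # True, TKEY <= AXFR <= ANY
dns.rdatatype.is_singleton(dns.rdatatype.SOA)   # True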
53
lib/dns/rdtypes/ANY/AFSDB.py
Normal file
@@ -0,0 +1,53 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.rdtypes.mxbase


class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX):

    """AFSDB record

    @ivar subtype: the subtype value
    @type subtype: int
    @ivar hostname: the hostname name
    @type hostname: dns.name.Name object"""

    # Use the property mechanism to make "subtype" an alias for the
    # "preference" attribute, and "hostname" an alias for the "exchange"
    # attribute.
    #
    # This lets us inherit the UncompressedMX implementation but lets
    # the caller use appropriate attribute names for the rdata type.
    #
    # We probably lose some performance vs. a cut-and-paste
    # implementation, but this way we don't copy code, and that's
    # good.

    def get_subtype(self):
        return self.preference

    def set_subtype(self, subtype):
        self.preference = subtype

    subtype = property(get_subtype, set_subtype)

    def get_hostname(self):
        return self.exchange

    def set_hostname(self, hostname):
        self.exchange = hostname

    hostname = property(get_hostname, set_hostname)
74
lib/dns/rdtypes/ANY/CAA.py
Normal file
@@ -0,0 +1,74 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import struct

import dns.exception
import dns.rdata
import dns.tokenizer


class CAA(dns.rdata.Rdata):

    """CAA (Certification Authority Authorization) record

    @ivar flags: the flags
    @type flags: int
    @ivar tag: the tag
    @type tag: string
    @ivar value: the value
    @type value: string
    @see: RFC 6844"""

    __slots__ = ['flags', 'tag', 'value']

    def __init__(self, rdclass, rdtype, flags, tag, value):
        super(CAA, self).__init__(rdclass, rdtype)
        self.flags = flags
        self.tag = tag
        self.value = value

    def to_text(self, origin=None, relativize=True, **kw):
        return '%u %s "%s"' % (self.flags,
                               dns.rdata._escapify(self.tag),
                               dns.rdata._escapify(self.value))

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        flags = tok.get_uint8()
        tag = tok.get_string().encode()
        if len(tag) > 255:
            raise dns.exception.SyntaxError("tag too long")
        if not tag.isalnum():
            raise dns.exception.SyntaxError("tag is not alphanumeric")
        value = tok.get_string().encode()
        return cls(rdclass, rdtype, flags, tag, value)

    def to_wire(self, file, compress=None, origin=None):
        file.write(struct.pack('!B', self.flags))
        l = len(self.tag)
        assert l < 256
        file.write(struct.pack('!B', l))
        file.write(self.tag)
        file.write(self.value)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        (flags, l) = struct.unpack('!BB', wire[current: current + 2])
        current += 2
        tag = wire[current: current + l]
        value = wire[current + l:current + rdlen - 2]
        return cls(rdclass, rdtype, flags, tag, value)
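A short sketch of the CAA text form handled by from_text()/to_text() above (illustrative, not part of the change):

import dns.rdata
import dns.rdataclass
import dns.rdatatype

caa = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CAA,
                          '0 issue "ca.example.net"')
print(caa.flags, caa.tag, caa.value)   # 0 b'issue' b'ca.example.net'
print(caa.to_text())                   # 0 issue "ca.example.net"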
25
lib/dns/rdtypes/ANY/CDNSKEY.py
Normal file
@@ -0,0 +1,25 @@
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.rdtypes.dnskeybase
from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set


__all__ = ['flags_to_text_set', 'flags_from_text_set']


class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase):

    """CDNSKEY record"""
21
lib/dns/rdtypes/ANY/CDS.py
Normal file
@@ -0,0 +1,21 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import dns.rdtypes.dsbase


class CDS(dns.rdtypes.dsbase.DSBase):

    """CDS record"""
122
lib/dns/rdtypes/ANY/CERT.py
Normal file
@@ -0,0 +1,122 @@
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

import struct
import base64

import dns.exception
import dns.dnssec
import dns.rdata
import dns.tokenizer

_ctype_by_value = {
    1: 'PKIX',
    2: 'SPKI',
    3: 'PGP',
    253: 'URI',
    254: 'OID',
}

_ctype_by_name = {
    'PKIX': 1,
    'SPKI': 2,
    'PGP': 3,
    'URI': 253,
    'OID': 254,
}


def _ctype_from_text(what):
    v = _ctype_by_name.get(what)
    if v is not None:
        return v
    return int(what)


def _ctype_to_text(what):
    v = _ctype_by_value.get(what)
    if v is not None:
        return v
    return str(what)


class CERT(dns.rdata.Rdata):

    """CERT record

    @ivar certificate_type: certificate type
    @type certificate_type: int
    @ivar key_tag: key tag
    @type key_tag: int
    @ivar algorithm: algorithm
    @type algorithm: int
    @ivar certificate: the certificate or CRL
    @type certificate: string
    @see: RFC 2538"""

    __slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate']

    def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm,
                 certificate):
        super(CERT, self).__init__(rdclass, rdtype)
        self.certificate_type = certificate_type
        self.key_tag = key_tag
        self.algorithm = algorithm
        self.certificate = certificate

    def to_text(self, origin=None, relativize=True, **kw):
        certificate_type = _ctype_to_text(self.certificate_type)
        return "%s %d %s %s" % (certificate_type, self.key_tag,
                                dns.dnssec.algorithm_to_text(self.algorithm),
                                dns.rdata._base64ify(self.certificate))

    @classmethod
    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
        certificate_type = _ctype_from_text(tok.get_string())
        key_tag = tok.get_uint16()
        algorithm = dns.dnssec.algorithm_from_text(tok.get_string())
        if algorithm < 0 or algorithm > 255:
            raise dns.exception.SyntaxError("bad algorithm type")
        chunks = []
        while 1:
            t = tok.get().unescape()
            if t.is_eol_or_eof():
                break
            if not t.is_identifier():
                raise dns.exception.SyntaxError
            chunks.append(t.value.encode())
        b64 = b''.join(chunks)
        certificate = base64.b64decode(b64)
        return cls(rdclass, rdtype, certificate_type, key_tag,
                   algorithm, certificate)

    def to_wire(self, file, compress=None, origin=None):
        prefix = struct.pack("!HHB", self.certificate_type, self.key_tag,
                             self.algorithm)
        file.write(prefix)
        file.write(self.certificate)

    @classmethod
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
        prefix = wire[current: current + 5].unwrap()
        current += 5
        rdlen -= 5
        if rdlen < 0:
            raise dns.exception.FormError
        (certificate_type, key_tag, algorithm) = struct.unpack("!HHB", prefix)
        certificate = wire[current: current + rdlen].unwrap()
        return cls(rdclass, rdtype, certificate_type, key_tag, algorithm,
                   certificate)