Compare commits
63 Commits
v2.1.1-bet
...
v2.1.6-bet
Author | SHA1 | Date | |
---|---|---|---|
![]() |
e91ba46265 | ||
![]() |
62104c95e3 | ||
![]() |
178bd89e7c | ||
![]() |
365260401c | ||
![]() |
04029bd4d3 | ||
![]() |
9cf1128712 | ||
![]() |
2eebce9f6c | ||
![]() |
b08e071b81 | ||
![]() |
7778d84728 | ||
![]() |
8e3fe7bfa2 | ||
![]() |
6f22c823be | ||
![]() |
34d7c67813 | ||
![]() |
862ed5ce9f | ||
![]() |
84406e6797 | ||
![]() |
19cf567366 | ||
![]() |
8af697a157 | ||
![]() |
76122bea5d | ||
![]() |
1a12422908 | ||
![]() |
2df9f0b48b | ||
![]() |
8540b80e57 | ||
![]() |
8ad565a444 | ||
![]() |
f91b6481b3 | ||
![]() |
826db082c9 | ||
![]() |
f3d64a7886 | ||
![]() |
031d078bc2 | ||
![]() |
04fcd78102 | ||
![]() |
53d1e0f541 | ||
![]() |
9719f0b25b | ||
![]() |
6d1d5bc822 | ||
![]() |
0d7bbe044d | ||
![]() |
b1dc5816a4 | ||
![]() |
476011a783 | ||
![]() |
e038c57c4c | ||
![]() |
a989a53750 | ||
![]() |
d8cfdea704 | ||
![]() |
ed4722c4ce | ||
![]() |
17ab5f05ed | ||
![]() |
71ab2248d7 | ||
![]() |
4fb4410552 | ||
![]() |
a915d2333f | ||
![]() |
aaf5a18251 | ||
![]() |
b90026801b | ||
![]() |
e9676e3651 | ||
![]() |
c16d3288d8 | ||
![]() |
0d7ade8ca4 | ||
![]() |
87b1118e98 | ||
![]() |
9f6422cc8d | ||
![]() |
df1a42a4ee | ||
![]() |
6554136a8f | ||
![]() |
81e04269fd | ||
![]() |
b6c6590a12 | ||
![]() |
136260a822 | ||
![]() |
5710bcb43c | ||
![]() |
30bc3f8a66 | ||
![]() |
e0e7d68df2 | ||
![]() |
cf73639281 | ||
![]() |
008e04d5cf | ||
![]() |
5f7991665c | ||
![]() |
5e000162c6 | ||
![]() |
ea1aba2c87 | ||
![]() |
f321bb869c | ||
![]() |
abe496668a | ||
![]() |
9cefc7f701 |
56
CHANGELOG.md
56
CHANGELOG.md
@@ -1,5 +1,61 @@
|
||||
# Changelog
|
||||
|
||||
## v2.1.6-beta (2018-05-09)
|
||||
|
||||
* Newsletters:
|
||||
* Change: Setting to specify static URL ID name instead of using the newsletter ID number.
|
||||
* Change: Reorganize newsletter config options.
|
||||
|
||||
|
||||
## v2.1.5-beta (2018-05-07)
|
||||
|
||||
* Newsletters:
|
||||
* New: Added setting for a custom newsletter template folder.
|
||||
* New: Added option to enable static newsletter URLs to retrieve the last sent scheduled newsletter.
|
||||
* New: Added ability to change the newsletter output directory and filenames.
|
||||
* New: Added option to save the newsletter file without sending it to a notification agent.
|
||||
* Fix: Check for disabled image hosting setting.
|
||||
* Fix: Cache newsletter images when refreshing the page.
|
||||
* Fix: Refresh image from the Plex server when uploading to image hosting.
|
||||
* Change: Allow all image hosting options with self-hosted newsletters.
|
||||
* UI:
|
||||
* Change: Don't retrieve recently added on the homepage if the Plex Cloud server is sleeping.
|
||||
* Other:
|
||||
* Fix: Imgur database upgrade migration.
|
||||
|
||||
|
||||
## v2.1.4 (2018-05-05)
|
||||
|
||||
* Newsletters:
|
||||
* Fix: Newsletter URL without an HTTP root.
|
||||
|
||||
|
||||
## v2.1.3-beta (2018-05-04)
|
||||
|
||||
* Newsletters:
|
||||
* Fix: HTTP root doubled in newsletter URL.
|
||||
* Fix: Configuration would not open with failed hostname resolution.
|
||||
* Fix: Schedule one day off when using weekday names in cron.
|
||||
* Fix: Images not refreshing when changed in Plex.
|
||||
* Fix: Cloudinary upload with non-ASCII image titles.
|
||||
* Other:
|
||||
* Fix: Potential XSS vulnerability in search.
|
||||
|
||||
|
||||
## v2.1.2-beta (2018-05-01)
|
||||
|
||||
* Newsletters:
|
||||
* New: Added Cloudinary option for image hosting.
|
||||
* Notifications:
|
||||
* New: Added Message-ID to Email header (Thanks @Dam64)
|
||||
* Fix: Posters not showing up on Twitter with self-hosted images.
|
||||
* Fix: Incorrect action parameter for new device notifications.
|
||||
* Change: Hardcode Pushover sound list instead of fetching the list every time.
|
||||
* API:
|
||||
* Fix: Success result for empty response data.
|
||||
* Change: Do not send notification when checking for Tautulli updates via the API.
|
||||
|
||||
|
||||
## v2.1.1-beta (2018-04-11)
|
||||
|
||||
* Monitoring:
|
||||
|
@@ -4092,4 +4092,9 @@ a:hover .overlay-refresh-image:hover {
|
||||
}
|
||||
a[data-tab-destination] {
|
||||
cursor: pointer;
|
||||
}
|
||||
.modal-config-section {
|
||||
margin-top: 10px !important;
|
||||
padding-top: 10px;
|
||||
border-top: 1px solid #444;
|
||||
}
|
@@ -5,6 +5,7 @@
|
||||
</%def>
|
||||
|
||||
<%def name="body()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<div class="container-fluid">
|
||||
% for section in config['home_sections']:
|
||||
% if section == 'current_activity':
|
||||
@@ -22,9 +23,10 @@
|
||||
</h3>
|
||||
</div>
|
||||
<div id="currentActivity">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted" id="dashboard-checking-activity"><i class="fa fa-refresh fa-spin"></i> Checking for activity...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div id="dashboard-no-activity" class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div id="dashboard-no-activity" class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
@@ -133,7 +135,17 @@
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<div id="recentlyAdded" style="margin-right: -15px;">
|
||||
% if PLEX_SERVER_UP:
|
||||
<div class="text-muted"><i class="fa fa-refresh fa-spin"></i> Looking for new items...</div>
|
||||
% elif config['pms_is_cloud']:
|
||||
<div class="text-muted">Plex Cloud server is sleeping.</div>
|
||||
% else:
|
||||
<div class="text-muted">There was an error communicating with your Plex Server.
|
||||
% if _session['user_group'] == 'admin':
|
||||
Check the <a href="logs">logs</a> and verify your server connection in the <a href="settings#tab_tabs-plex_media_server">settings</a>.
|
||||
% endif
|
||||
</div>
|
||||
% endif
|
||||
<br>
|
||||
</div>
|
||||
</div>
|
||||
@@ -220,6 +232,7 @@
|
||||
</%def>
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
<script src="${http_root}js/moment-with-locale.js"></script>
|
||||
<script src="${http_root}js/jquery.scrollbar.min.js"></script>
|
||||
<script src="${http_root}js/jquery.mousewheel.min.js"></script>
|
||||
@@ -252,7 +265,6 @@
|
||||
});
|
||||
}
|
||||
</script>
|
||||
<% from plexpy import PLEX_SERVER_UP %>
|
||||
% if 'current_activity' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
var defaultHandler = {
|
||||
@@ -744,7 +756,7 @@
|
||||
getLibraryStats();
|
||||
</script>
|
||||
% endif
|
||||
% if 'recently_added' in config['home_sections']:
|
||||
% if 'recently_added' in config['home_sections'] and PLEX_SERVER_UP:
|
||||
<script>
|
||||
function recentlyAdded(recently_added_count, recently_added_type) {
|
||||
showMsg("Loading recently added items...", true, false, 0);
|
||||
|
@@ -400,14 +400,14 @@ DOCUMENTATION :: END
|
||||
% if data.get('poster_url'):
|
||||
<div class="btn-group">
|
||||
% if data['media_type'] == 'artist' or data['media_type'] == 'album' or data['media_type'] == 'track':
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="80" data-width="80" style="display: inline-flex;">
|
||||
% else:
|
||||
<span class="imgur-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
<span class="hosted-poster-tooltip" data-toggle="popover" data-img="${data['poster_url']}" data-height="120" data-width="80" style="display: inline-flex;">
|
||||
% endif
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-imgur-poster"
|
||||
<button class="btn btn-danger btn-edit" data-toggle="modal" aria-pressed="false" autocomplete="off" id="delete-hosted-poster"
|
||||
data-id="${data['parent_rating_key'] if data['media_type'] in ('episode', 'track') else data['rating_key']}"
|
||||
data-title="${data["poster_title"]}">
|
||||
<i class="fa fa-picture-o"></i> Delete Imgur Poster
|
||||
<i class="fa fa-picture-o"></i> Delete ${data['img_service']} Poster
|
||||
</button>
|
||||
</span>
|
||||
</div>
|
||||
@@ -705,7 +705,7 @@ DOCUMENTATION :: END
|
||||
</script>
|
||||
% if data.get('poster_url'):
|
||||
<script>
|
||||
$('.imgur-poster-tooltip').popover({
|
||||
$('.hosted-poster-tooltip').popover({
|
||||
html: true,
|
||||
container: 'body',
|
||||
trigger: 'hover',
|
||||
@@ -716,14 +716,14 @@ DOCUMENTATION :: END
|
||||
}
|
||||
});
|
||||
|
||||
$('#delete-imgur-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the Imgur poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
$('#delete-hosted-poster').on('click', function () {
|
||||
var msg = 'Are you sure you want to delete the ${data['img_service']} poster for <strong>' + $(this).data('title') + '</strong>?<br><br>' +
|
||||
'All previous links to this image will no longer work.';
|
||||
var url = 'delete_imgur_poster';
|
||||
var url = 'delete_hosted_images';
|
||||
var data = { rating_key: $(this).data('id') };
|
||||
var callback = function () {
|
||||
$('.imgur-poster-tooltip').popover('destroy');
|
||||
$('#delete-imgur-poster').closest('.btn-group').remove();
|
||||
$('.hosted-poster-tooltip').popover('destroy');
|
||||
$('#delete-hosted-poster').closest('.btn-group').remove();
|
||||
};
|
||||
confirmAjaxCall(url, msg, data, false, callback);
|
||||
});
|
||||
|
@@ -1,17 +1,17 @@
|
||||
function initConfigCheckbox(elem) {
|
||||
var config = $(elem).closest('div').next();
|
||||
function initConfigCheckbox(elem, toggleElem = null, reverse = false) {
|
||||
var config = toggleElem ? $(toggleElem) : $(elem).closest('div').next();
|
||||
config.css('overflow', 'hidden');
|
||||
if ($(elem).is(":checked")) {
|
||||
config.show();
|
||||
config.toggle(!reverse);
|
||||
} else {
|
||||
config.hide();
|
||||
config.toggle(reverse);
|
||||
}
|
||||
$(elem).click(function () {
|
||||
var config = $(this).closest('div').next();
|
||||
var config = toggleElem ? $(toggleElem) : $(this).closest('div').next();
|
||||
if ($(this).is(":checked")) {
|
||||
config.slideDown();
|
||||
config.slideToggleBool(!reverse);
|
||||
} else {
|
||||
config.slideUp();
|
||||
config.slideToggleBool(reverse);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -449,4 +449,8 @@ function forceMinMax(elem) {
|
||||
|
||||
function capitalizeFirstLetter(string) {
|
||||
return string.charAt(0).toUpperCase() + string.slice(1);
|
||||
}
|
||||
|
||||
$.fn.slideToggleBool = function(bool, options) {
|
||||
return bool ? $(this).slideDown(options) : $(this).slideUp(options);
|
||||
}
|
@@ -20,7 +20,7 @@
|
||||
<div class="row">
|
||||
<ul class="nav nav-tabs list-unstyled" role="tablist">
|
||||
<li role="presentation" class="active"><a href="#tabs-newsletter_config" aria-controls="tabs-newsletter_config" role="tab" data-toggle="tab">Configuration</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_agent" aria-controls="tabs-newsletter_agent" role="tab" data-toggle="tab">Notification Agent</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_saving_sending" aria-controls="tabs-newsletter_saving_sending" role="tab" data-toggle="tab">Saving & Sending</a></li>
|
||||
<li role="presentation"><a href="#tabs-newsletter_text" aria-controls="tabs-newsletter_text" role="tab" data-toggle="tab">Newsletter Text</a></li>
|
||||
<li role="presentation"><a href="#tabs-test_newsletter" aria-controls="tabs-test_newsletter" role="tab" data-toggle="tab">Test Newsletter</a></li>
|
||||
</ul>
|
||||
@@ -70,7 +70,7 @@
|
||||
<p class="help-block">Set the time frame to include in the newsletter. Note: Days uses calendar days (i.e. since midnight).</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<input type="hidden" id="newsletter_id" name="newsletter_id" value="${newsletter['id']}" />
|
||||
<input type="hidden" id="agent_id" name="agent_id" value="${newsletter['agent_id']}" />
|
||||
% for item in newsletter['config_options']:
|
||||
@@ -165,7 +165,16 @@
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
<div class="col-md-12" style="margin-top: 10px; padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<div class="form-group">
|
||||
<label for="id_name">Unique ID Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="id_name" name="id_name" value="${newsletter['id_name']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter a unique ID name to create a static URL to the last sent scheduled newsletter at <span class="inline-pre">${http_root}newsletter/id/<id_name></span>. Only letters (a-z), numbers (0-9), underscores (_) and hyphens (-) are allowed. Leave blank to disable.</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="friendly_name">Description</label>
|
||||
<div class="row">
|
||||
@@ -178,12 +187,32 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_agent">
|
||||
<div role="tabpanel" class="tab-pane" id="tabs-newsletter_saving_sending">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<label>Saving</label>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send newsletter as an HTML formatted Email
|
||||
<input type="checkbox" id="newsletter_config_save_only_checkbox" data-id="newsletter_config_save_only" class="checkboxes" value="1" ${checked(newsletter['config']['save_only'])}> Save HTML File Only
|
||||
</label>
|
||||
<p class="help-block">Enable to save the newsletter HTML file without sending it to any notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_save_only" name="newsletter_config_save_only" value="${newsletter['config']['save_only']}">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="newsletter_config_filename">HTML File Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="text" class="form-control" id="newsletter_config_filename" name="newsletter_config_filename" value="${newsletter['config']['filename']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the file name to use when saving the newsletter (ending with <span class="inline-pre">.html</span>). You may use any of the <a href="#newsletter-text-sub-modal" data-toggle="modal">newsletter text parameters</a>. Leave blank for default.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-12 modal-config-section" id="newsletter_agent_options">
|
||||
<label>Sending</label>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" id="newsletter_config_formatted_checkbox" data-id="newsletter_config_formatted" class="checkboxes" value="1" ${checked(newsletter['config']['formatted'])}> Send Newsletter as an HTML Formatted Email
|
||||
</label>
|
||||
<p class="help-block">Enable to send the newsletter as an HTML formatted Email. Disable to only send a subject and body message to a different notification agent.</p>
|
||||
<input type="hidden" id="newsletter_config_formatted" name="newsletter_config_formatted" value="${newsletter['config']['formatted']}">
|
||||
@@ -234,100 +263,100 @@
|
||||
Note: Self-hosted newsletters must be enabled under <a data-tab-destination="tabs-notifications" data-dismiss="modal" data-target="#newsletter_self_hosted">Newsletters</a> to include a link to the newsletter.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="newsletter-email-config" class="col-md-12" style="padding-top: 10px; border-top: 1px solid #444;">
|
||||
% for item in newsletter['email_config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
<div id="newsletter-email-config">
|
||||
% for item in newsletter['email_config_options']:
|
||||
% if item['input_type'] == 'help':
|
||||
<div class="form-group">
|
||||
<label>${item['label']}</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
% elif item['input_type'] == 'text' or item['input_type'] == 'password':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30" ${'readonly' if item.get('readonly') else ''}>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
% elif item['input_type'] == 'number':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-3">
|
||||
<input type="${item['input_type']}" class="form-control" id="${item['name']}" name="${item['name']}" value="${item['value']}" size="30">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
% elif item['input_type'] == 'button':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<input type="button" class="btn btn-bright" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
% elif item['input_type'] == 'checkbox' and item['name'] != 'newsletter_email_html_support':
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" data-id="${item['name']}" class="checkboxes" value="1" ${checked(item['value'])}> ${item['label']}
|
||||
</label>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
<input type="hidden" id="${item['name']}" name="${item['name']}" value="${item['value']}">
|
||||
</div>
|
||||
% elif item['input_type'] == 'select':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
% for key, value in sorted(item['select_options'].iteritems()):
|
||||
% if key == item['value']:
|
||||
<option value="${key}" selected>${value}</option>
|
||||
% else:
|
||||
<option value="${key}">${value}</option>
|
||||
% endif
|
||||
% endfor
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
% elif item['input_type'] == 'selectize':
|
||||
<div class="form-group">
|
||||
<label for="${item['name']}">${item['label']}</label>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<select class="form-control" id="${item['name']}" name="${item['name']}">
|
||||
<option value="select-all">Select All</option>
|
||||
<option value="remove-all">Remove All</option>
|
||||
% if isinstance(item['select_options'], dict):
|
||||
% for section, options in item['select_options'].iteritems():
|
||||
<optgroup label="${section}">
|
||||
% for option in sorted(options, key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
</optgroup>
|
||||
% endfor
|
||||
% else:
|
||||
<option value="border-all"></option>
|
||||
% for option in sorted(item['select_options'], key=lambda x: x['text'].lower()):
|
||||
<option value="${option['value']}">${option['text']}</option>
|
||||
% endfor
|
||||
% endif
|
||||
</select>
|
||||
% endif
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
</div>
|
||||
<p class="help-block">${item['description'] | n}</p>
|
||||
% endif
|
||||
% endfor
|
||||
<input type="hidden" id="newsletter_email_html_support" name="newsletter_email_html_support" value="1">
|
||||
</div>
|
||||
% endif
|
||||
% endfor
|
||||
<input type="hidden" id="newsletter_email_html_support" name="newsletter_email_html_support" value="1">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -458,6 +487,26 @@
|
||||
toggleCustomCron();
|
||||
});
|
||||
|
||||
function validateFilename() {
|
||||
var filename = $('#newsletter_config_filename').val();
|
||||
if (filename !== '' && !(filename.endsWith('.html'))) {
|
||||
showMsg('<i class="fa fa-times"></i> Failed to save newsletter. Invalid file name.', false, true, 5000, true);
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
function validateIDName() {
|
||||
var id_name = $('#id_name').val();
|
||||
if (/^[a-zA-Z0-9_-]*$/.test(id_name)) {
|
||||
return true;
|
||||
} else {
|
||||
showMsg('<i class="fa fa-times"></i> Failed to save newsletter. Invalid unique ID name.', false, true, 5000, true);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
var $incl_libraries = $('#newsletter_config_incl_libraries').selectize({
|
||||
plugins: ['remove_button'],
|
||||
maxItems: null,
|
||||
@@ -485,6 +534,8 @@
|
||||
var incl_libraries = $incl_libraries[0].selectize;
|
||||
incl_libraries.setValue(${json.dumps(next((c['value'] for c in newsletter['config_options'] if c['name'] == 'newsletter_config_incl_libraries'), [])) | n});
|
||||
|
||||
initConfigCheckbox('#newsletter_config_save_only_checkbox', '#newsletter_agent_options', true);
|
||||
|
||||
function toggleEmailSelect () {
|
||||
if ($('#newsletter_config_formatted_checkbox').is(':checked')) {
|
||||
$('#newsletter_body').hide();
|
||||
@@ -643,7 +694,9 @@
|
||||
if ($('#custom_cron').val() === '0'){
|
||||
$("#cron_value").val(cron_widget.cron('value'));
|
||||
}
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, true, saveCallback);
|
||||
if (validateFilename() && validateIDName()){
|
||||
doAjaxCall('set_newsletter_config', $(this), 'tabs', true, true, saveCallback);
|
||||
}
|
||||
}
|
||||
|
||||
$('#delete-newsletter-item').click(function () {
|
||||
|
@@ -32,7 +32,7 @@
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
var frame = $('<iframe></iframe>', {
|
||||
src: '${http_root}real_newsletter?${urllib.urlencode(kwargs) | n}',
|
||||
src: 'real_newsletter?${urllib.urlencode(kwargs) | n}',
|
||||
frameborder: '0',
|
||||
style: 'display: none; height: 100vh; width: 100vw;'
|
||||
});
|
||||
|
@@ -123,7 +123,7 @@
|
||||
% endif
|
||||
% endfor
|
||||
</div>
|
||||
<div class="col-md-12" style="margin-top: 10px; padding-top: 10px; border-top: 1px solid #444;">
|
||||
<div class="col-md-12 modal-config-section">
|
||||
<div class="form-group">
|
||||
<label for="friendly_name">Description</label>
|
||||
<div class="row">
|
||||
|
@@ -28,15 +28,17 @@
|
||||
|
||||
<%def name="javascriptIncludes()">
|
||||
<script>
|
||||
var query_string = "${query.replace('"','\\"').replace('/','\\/') | n}";
|
||||
|
||||
$('#search_button').removeClass('btn-inactive');
|
||||
$('#query').val("${query.replace('"','\\"') | n}").css({ right: '0', width: '250px' }).addClass('active');
|
||||
$('#query').val(query_string).css({ right: '0', width: '250px' }).addClass('active');
|
||||
|
||||
$.ajax({
|
||||
url: 'get_search_results_children',
|
||||
type: "GET",
|
||||
type: "POST",
|
||||
async: true,
|
||||
data: {
|
||||
query: "${query.replace('"','\\"') | n}",
|
||||
query: query_string,
|
||||
limit: 30
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
|
@@ -274,7 +274,7 @@
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="home_refresh_interval">Activty Refresh Interval</label>
|
||||
<label for="home_refresh_interval">Activity Refresh Interval</label>
|
||||
<div class="row">
|
||||
<div class="col-md-2">
|
||||
<input type="text" class="form-control" data-parsley-type="integer" id="home_refresh_interval" name="home_refresh_interval" value="${config['home_refresh_interval']}" size="5" data-parsley-min="2" data-parsley-trigger="change" data-parsley-errors-container="#home_refresh_interval_error" required>
|
||||
@@ -702,6 +702,17 @@
|
||||
The server URL that Tautulli will use to connect to your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="pms_url">Plex Server Identifier</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}" size="30" readonly>
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
The unique identifier for your Plex server. Retrieved automatically.
|
||||
</p>
|
||||
</div>
|
||||
<div class="checkbox advanced-setting">
|
||||
<label>
|
||||
<input type="checkbox" class="pms-settings" id="pms_url_manual" name="pms_url_manual" value="1" ${config['pms_url_manual']}> Manual Connection
|
||||
@@ -728,7 +739,6 @@
|
||||
</div>
|
||||
|
||||
<input type="hidden" id="pms_is_cloud" name="pms_is_cloud" value="${config['pms_is_cloud']}">
|
||||
<input type="hidden" id="pms_identifier" name="pms_identifier" value="${config['pms_identifier']}">
|
||||
<input type="checkbox" name="server_changed" id="server_changed" value="1" style="display: none;">
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
@@ -955,10 +965,31 @@
|
||||
<p class="help-block">Enable to host newsletters on your own domain. This will generate a link to an HTML page where you can view the newsletter.</p>
|
||||
</div>
|
||||
<div id="self_host_newsletter_options" style="overlfow: hidden; display: ${'block' if config['newsletter_self_hosted'] == 'checked' else 'none'}">
|
||||
<p class="help-block" id="self_host_newsletter_message">Note: The <span class="inline-pre">${http_root}newsletter</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block" id="self_host_newsletter_message">
|
||||
Note: The <span class="inline-pre">${http_root}newsletter</span> endpoint on your domain must be publicly accessible from the internet.
|
||||
</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
</div>
|
||||
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Custom Newsletter Templates Folder</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_custom_dir" name="newsletter_custom_dir" value="${config['newsletter_custom_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Optional: Enter the full path to your custom newsletter templates folder. Leave blank for default.</p>
|
||||
</div>
|
||||
<div class="form-group advanced-setting">
|
||||
<label for="newsletter_dir">Newsletter Output Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">Enter the full path to where newsletter files will be saved.</p>
|
||||
</div>
|
||||
|
||||
<div class="padded-header">
|
||||
<h3>3rd Party APIs</h3>
|
||||
</div>
|
||||
@@ -970,6 +1001,7 @@
|
||||
<select class="form-control" id="notify_upload_posters" name="notify_upload_posters">
|
||||
<option value="0" ${'selected' if config['notify_upload_posters'] == 0 else ''}>Disabled</option>
|
||||
<option value="1" ${'selected' if config['notify_upload_posters'] == 1 else ''}>Imgur</option>
|
||||
<option value="3" ${'selected' if config['notify_upload_posters'] == 3 else ''}>Cloudinary</option>
|
||||
<option value="2" ${'selected' if config['notify_upload_posters'] == 2 else ''}>Self-hosted on public Tautulli domain</option>
|
||||
</select>
|
||||
</div>
|
||||
@@ -977,6 +1009,10 @@
|
||||
<p class="help-block">Select where to host Plex images for notifications and newsletters.</p>
|
||||
</div>
|
||||
<div id="imgur_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 1 else 'block'}">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can register a new Imgur application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.<br>
|
||||
Warning: Imgur uploads are rate-limited and newsletters may exceed the limit. Please use Cloudinary for newsletters instead.
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<label for="imgur_client_id">Imgur Client ID</label>
|
||||
<div class="row">
|
||||
@@ -984,16 +1020,51 @@
|
||||
<input type="text" class="form-control" id="imgur_client_id" name="imgur_client_id" value="${config['imgur_client_id']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Imgur API client ID in order to upload posters.
|
||||
You can register a new application <a href="${anon_url('https://api.imgur.com/oauth2/addclient')}" target="_blank">here</a>.
|
||||
</p>
|
||||
<p class="help-block">Enter your Imgur API Client ID.</p>
|
||||
</div>
|
||||
</div>
|
||||
<div id="self_host_image_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 2 else 'block'}">
|
||||
<p class="help-block" id="self_host_image_message">Note: The <span class="inline-pre">${http_root}image</span> endpoint on your domain must be publicly accessible from the internet.</p>
|
||||
<p class="help-block settings-warning base-url-warning">Warning: Public Tautulli domain not set under <a data-tab-destination="tabs-web_interface" data-target="#http_base_url">Web Interface</a>.</p>
|
||||
</div>
|
||||
<div id="cloudinary_upload_options" style="overlfow: hidden; display: ${'none' if config['notify_upload_posters'] != 3 else 'block'}">
|
||||
<p class="help-block" id="imgur_upload_message">
|
||||
You can sign up for Cloudinary <a href="${anon_url('https://cloudinary.com')}" target="_blank">here</a>.<br>
|
||||
</p>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_cloud_name">Cloudinary Cloud Name</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_cloud_name" name="cloudinary_cloud_name" value="${config['cloudinary_cloud_name']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary Cloud Name.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_key">Cloudinary API Key</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_key" name="cloudinary_api_key" value="${config['cloudinary_api_key']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Key.
|
||||
</p>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cloudinary_api_secret">Cloudinary API Secret</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control" id="cloudinary_api_secret" name="cloudinary_api_secret" value="${config['cloudinary_api_secret']}" data-parsley-trigger="change">
|
||||
</div>
|
||||
</div>
|
||||
<p class="help-block">
|
||||
Enter your Cloudinary API Secret.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="checkbox">
|
||||
<label>
|
||||
<input type="checkbox" name="themoviedb_lookup" id="themoviedb_lookup" value="1" ${config['themoviedb_lookup']}> Lookup TheMovieDB Links
|
||||
@@ -1041,7 +1112,7 @@
|
||||
Add a new newsletter agent, or configure an existing newsletter agent by clicking the settings icon on the right.
|
||||
</p>
|
||||
<p class="help-block settings-warning" id="newsletter_upload_warning">
|
||||
Note: Either <a data-tab-destination="tabs-notifications" data-target="#notify_upload_posters">Image Hosting</a> on Imgur or <a data-tab-destination="tabs-notifications" data-target="#newsletter_self_hosted">Self-Hosted Newsletters</a> must be enabled.</span>
|
||||
Warning: The <a data-tab-destination="tabs-notifications" data-target="#notify_upload_posters">Image Hosting</a> setting must be enabled for images to display on the newsletter.</span>
|
||||
</p>
|
||||
<br/>
|
||||
<div id="plexpy-newsletters-table">
|
||||
@@ -1130,14 +1201,6 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="newsletter_dir">Newsletter Directory</label>
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<input type="text" class="form-control directory-settings" id="newsletter_dir" name="newsletter_dir" value="${config['newsletter_dir']}">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p><input type="button" class="btn btn-bright save-button" value="Save" data-success="Changes saved successfully"></p>
|
||||
|
||||
@@ -2538,6 +2601,11 @@ $(document).ready(function() {
|
||||
} else {
|
||||
$('#self_host_image_options').slideUp();
|
||||
}
|
||||
if (upload_val === '3') {
|
||||
$('#cloudinary_upload_options').slideDown();
|
||||
} else {
|
||||
$('#cloudinary_upload_options').slideUp();
|
||||
}
|
||||
}
|
||||
$('#notify_upload_posters').change(function () {
|
||||
imageUpload();
|
||||
@@ -2557,10 +2625,10 @@ $(document).ready(function() {
|
||||
});
|
||||
|
||||
function newsletterUploadEnabled() {
|
||||
if ($('#notify_upload_posters').val() === '1' || $('#newsletter_self_hosted').is(':checked')) {
|
||||
$('#newsletter_upload_warning').hide();
|
||||
} else {
|
||||
if ($('#notify_upload_posters').val() === '0') {
|
||||
$('#newsletter_upload_warning').show();
|
||||
} else {
|
||||
$('#newsletter_upload_warning').hide();
|
||||
}
|
||||
}
|
||||
newsletterUploadEnabled();
|
||||
|
@@ -96,7 +96,7 @@ DOCUMENTATION :: END
|
||||
</div>
|
||||
</div>
|
||||
<div class='table-card-back'>
|
||||
<div id="search-results-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
<div id="search-results-list" class="children-list"><i class="fa fa-refresh fa-spin"></i> Loading search results...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -188,7 +188,7 @@ DOCUMENTATION :: END
|
||||
},
|
||||
complete: function (xhr, status) {
|
||||
$('#search-results-list').html(xhr.responseText);
|
||||
$('#update_query_title').html(query_string)
|
||||
$('#update_query_title').text(query_string)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@@ -1,17 +1,24 @@
|
||||
% if data:
|
||||
<%
|
||||
import plexpy
|
||||
from plexpy.helpers import grouper
|
||||
from plexpy.helpers import grouper, get_img_service
|
||||
|
||||
recently_added = data['recently_added']
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
base_url_image = base_url + 'image/'
|
||||
elif preview:
|
||||
base_url = 'newsletter/'
|
||||
base_url_image = base_url + 'image/'
|
||||
else:
|
||||
base_url = base_url_image = ''
|
||||
base_url = ''
|
||||
|
||||
service = get_img_service(include_self=True)
|
||||
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/image/'
|
||||
elif service and service != 'self-hosted' and preview:
|
||||
base_url_image = 'newsletter/image/'
|
||||
else:
|
||||
base_url_image = ''
|
||||
|
||||
%>
|
||||
<!doctype html>
|
||||
<html>
|
||||
@@ -83,6 +90,14 @@
|
||||
/* -------------------------------------
|
||||
HEADER, FOOTER, MAIN
|
||||
------------------------------------- */
|
||||
.local-preview-note {
|
||||
text-align: center;
|
||||
padding-top: 10px;
|
||||
}
|
||||
.local-preview-note p {
|
||||
color: #282A2D;
|
||||
font-size: 12px;
|
||||
}
|
||||
.main {
|
||||
background: #282A2D;
|
||||
border-radius: 3px;
|
||||
@@ -608,6 +623,11 @@
|
||||
</style>
|
||||
</head>
|
||||
<body class="" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;-webkit-font-smoothing: antialiased;font-size: 14px;line-height: 1.4;margin: 0;padding: 0;-ms-text-size-adjust: 100%;-webkit-text-size-adjust: 100%;">
|
||||
% if preview and service:
|
||||
<div class="local-preview-note" style="text-align: center;padding-top: 10px;"><p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;color: #282A2D;font-size: 12px;">Note: Local preview images only - images will be uploaded to ${service.capitalize()} when the newsletter is sent.</p></div> <!-- IGNORE SAVE -->
|
||||
% elif preview and not service:
|
||||
<div class="local-preview-note" style="text-align: center;padding-top: 10px;"><p style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-weight: 400;margin: 0;color: #282A2D;font-size: 12px;">Warning: The Image Hosting setting must be enabled for images to display on the newsletter.</p></div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="body" style="border-collapse: separate;mso-table-lspace: 0pt;mso-table-rspace: 0pt;width: 100%;">
|
||||
<tr>
|
||||
<td class="container" style="font-family: 'Open Sans', Helvetica, Arial, sans-serif;font-size: 14px;vertical-align: top;display: block;max-width: 1042px;padding: 10px;width: 1042px;margin: 0 auto !important;">
|
||||
|
@@ -1,17 +1,24 @@
|
||||
% if data:
|
||||
<%
|
||||
import plexpy
|
||||
from plexpy.helpers import grouper
|
||||
from plexpy.helpers import grouper, get_img_service
|
||||
|
||||
recently_added = data['recently_added']
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
base_url_image = base_url + 'image/'
|
||||
elif preview:
|
||||
base_url = 'newsletter/'
|
||||
base_url_image = base_url + 'image/'
|
||||
else:
|
||||
base_url = base_url_image = ''
|
||||
base_url = ''
|
||||
|
||||
service = get_img_service(include_self=True)
|
||||
if service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url_image = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/image/'
|
||||
elif service and service != 'self-hosted' and preview:
|
||||
base_url_image = 'newsletter/image/'
|
||||
else:
|
||||
base_url_image = ''
|
||||
|
||||
%>
|
||||
<!doctype html>
|
||||
<html>
|
||||
@@ -83,6 +90,14 @@
|
||||
/* -------------------------------------
|
||||
HEADER, FOOTER, MAIN
|
||||
------------------------------------- */
|
||||
.local-preview-note {
|
||||
text-align: center;
|
||||
padding-top: 10px;
|
||||
}
|
||||
.local-preview-note p {
|
||||
color: #282A2D;
|
||||
font-size: 12px;
|
||||
}
|
||||
.main {
|
||||
background: #282A2D;
|
||||
border-radius: 3px;
|
||||
@@ -608,6 +623,11 @@
|
||||
</style>
|
||||
</head>
|
||||
<body class="">
|
||||
% if preview and service:
|
||||
<div class="local-preview-note"><p>Note: Local preview images only - images will be uploaded to ${service.capitalize()} when the newsletter is sent.</p></div> <!-- IGNORE SAVE -->
|
||||
% elif preview and not service:
|
||||
<div class="local-preview-note"><p>Warning: The Image Hosting setting must be enabled for images to display on the newsletter.</p></div> <!-- IGNORE SAVE -->
|
||||
% endif
|
||||
<table border="0" cellpadding="0" cellspacing="0" class="body">
|
||||
<tr>
|
||||
<td class="container">
|
||||
|
@@ -9,7 +9,7 @@ __all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
|
||||
'WeekdayPositionExpression', 'LastDayOfMonthExpression')
|
||||
|
||||
|
||||
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
|
||||
WEEKDAYS = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
|
||||
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
|
||||
|
||||
|
||||
|
302
lib/cloudinary/__init__.py
Normal file
302
lib/cloudinary/__init__.py
Normal file
@@ -0,0 +1,302 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("Cloudinary")
|
||||
ch = logging.StreamHandler()
|
||||
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
ch.setFormatter(formatter)
|
||||
logger.addHandler(ch)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from six import python_2_unicode_compatible
|
||||
|
||||
from cloudinary import utils
|
||||
from cloudinary.compat import urlparse, parse_qs
|
||||
from cloudinary.search import Search
|
||||
|
||||
CF_SHARED_CDN = "d3jpl91pxevbkh.cloudfront.net"
|
||||
OLD_AKAMAI_SHARED_CDN = "cloudinary-a.akamaihd.net"
|
||||
AKAMAI_SHARED_CDN = "res.cloudinary.com"
|
||||
SHARED_CDN = AKAMAI_SHARED_CDN
|
||||
CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"
|
||||
|
||||
VERSION = "1.11.0"
|
||||
USER_AGENT = "CloudinaryPython/" + VERSION
|
||||
""" :const: USER_AGENT """
|
||||
|
||||
USER_PLATFORM = ""
|
||||
"""
|
||||
Additional information to be passed with the USER_AGENT, e.g. "CloudinaryMagento/1.0.1".
|
||||
This value is set in platform-specific implementations that use cloudinary_php.
|
||||
|
||||
The format of the value should be <ProductName>/Version[ (comment)].
|
||||
@see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
|
||||
|
||||
**Do not set this value in application code!**
|
||||
"""
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
"""Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
|
||||
Prepends `USER_PLATFORM` if it is defined.
|
||||
|
||||
:returns: the user agent
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
if USER_PLATFORM == "":
|
||||
return USER_AGENT
|
||||
else:
|
||||
return USER_PLATFORM + " " + USER_AGENT
|
||||
|
||||
|
||||
def import_django_settings():
|
||||
try:
|
||||
import django.conf
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
try:
|
||||
if 'CLOUDINARY' in dir(django.conf.settings):
|
||||
return django.conf.settings.CLOUDINARY
|
||||
else:
|
||||
return None
|
||||
except ImproperlyConfigured:
|
||||
return None
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
|
||||
class Config(object):
|
||||
def __init__(self):
|
||||
django_settings = import_django_settings()
|
||||
if django_settings:
|
||||
self.update(**django_settings)
|
||||
elif os.environ.get("CLOUDINARY_CLOUD_NAME"):
|
||||
self.update(
|
||||
cloud_name=os.environ.get("CLOUDINARY_CLOUD_NAME"),
|
||||
api_key=os.environ.get("CLOUDINARY_API_KEY"),
|
||||
api_secret=os.environ.get("CLOUDINARY_API_SECRET"),
|
||||
secure_distribution=os.environ.get("CLOUDINARY_SECURE_DISTRIBUTION"),
|
||||
private_cdn=os.environ.get("CLOUDINARY_PRIVATE_CDN") == 'true'
|
||||
)
|
||||
elif os.environ.get("CLOUDINARY_URL"):
|
||||
cloudinary_url = os.environ.get("CLOUDINARY_URL")
|
||||
self._parse_cloudinary_url(cloudinary_url)
|
||||
|
||||
def _parse_cloudinary_url(self, cloudinary_url):
|
||||
uri = urlparse(cloudinary_url.replace("cloudinary://", "http://"))
|
||||
for k, v in parse_qs(uri.query).items():
|
||||
if self._is_nested_key(k):
|
||||
self._put_nested_key(k, v)
|
||||
else:
|
||||
self.__dict__[k] = v[0]
|
||||
self.update(
|
||||
cloud_name=uri.hostname,
|
||||
api_key=uri.username,
|
||||
api_secret=uri.password,
|
||||
private_cdn=uri.path != ''
|
||||
)
|
||||
if uri.path != '':
|
||||
self.update(secure_distribution=uri.path[1:])
|
||||
|
||||
def __getattr__(self, i):
|
||||
if i in self.__dict__:
|
||||
return self.__dict__[i]
|
||||
else:
|
||||
return None
|
||||
|
||||
def update(self, **keywords):
|
||||
for k, v in keywords.items():
|
||||
self.__dict__[k] = v
|
||||
|
||||
def _is_nested_key(self, key):
|
||||
return re.match(r'\w+\[\w+\]', key)
|
||||
|
||||
def _put_nested_key(self, key, value):
|
||||
chain = re.split(r'[\[\]]+', key)
|
||||
chain = [key for key in chain if key]
|
||||
outer = self.__dict__
|
||||
last_key = chain.pop()
|
||||
for inner_key in chain:
|
||||
if inner_key in outer:
|
||||
inner = outer[inner_key]
|
||||
else:
|
||||
inner = dict()
|
||||
outer[inner_key] = inner
|
||||
outer = inner
|
||||
if isinstance(value, list):
|
||||
value = value[0]
|
||||
outer[last_key] = value
|
||||
|
||||
_config = Config()
|
||||
|
||||
|
||||
def config(**keywords):
|
||||
global _config
|
||||
_config.update(**keywords)
|
||||
return _config
|
||||
|
||||
|
||||
def reset_config():
|
||||
global _config
|
||||
_config = Config()
|
||||
|
||||
|
||||
@python_2_unicode_compatible
|
||||
class CloudinaryResource(object):
|
||||
def __init__(self, public_id=None, format=None, version=None,
|
||||
signature=None, url_options=None, metadata=None, type=None, resource_type=None,
|
||||
default_resource_type=None):
|
||||
self.metadata = metadata
|
||||
metadata = metadata or {}
|
||||
self.public_id = public_id or metadata.get('public_id')
|
||||
self.format = format or metadata.get('format')
|
||||
self.version = version or metadata.get('version')
|
||||
self.signature = signature or metadata.get('signature')
|
||||
self.type = type or metadata.get('type') or "upload"
|
||||
self.resource_type = resource_type or metadata.get('resource_type') or default_resource_type
|
||||
self.url_options = url_options or {}
|
||||
|
||||
def __str__(self):
|
||||
return self.public_id
|
||||
|
||||
def __len__(self):
|
||||
return len(self.public_id) if self.public_id is not None else 0
|
||||
|
||||
def validate(self):
|
||||
return self.signature == self.get_expected_signature()
|
||||
|
||||
def get_prep_value(self):
|
||||
if None in [self.public_id,
|
||||
self.type,
|
||||
self.resource_type]:
|
||||
return None
|
||||
prep = ''
|
||||
prep = prep + self.resource_type + '/' + self.type + '/'
|
||||
if self.version: prep = prep + 'v' + str(self.version) + '/'
|
||||
prep = prep + self.public_id
|
||||
if self.format: prep = prep + '.' + self.format
|
||||
return prep
|
||||
|
||||
def get_presigned(self):
|
||||
return self.get_prep_value() + '#' + self.get_expected_signature()
|
||||
|
||||
def get_expected_signature(self):
|
||||
return utils.api_sign_request({"public_id": self.public_id, "version": self.version}, config().api_secret)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
return self.build_url(**self.url_options)
|
||||
|
||||
def __build_url(self, **options):
|
||||
combined_options = dict(format=self.format, version=self.version, type=self.type,
|
||||
resource_type=self.resource_type or "image")
|
||||
combined_options.update(options)
|
||||
public_id = combined_options.get('public_id') or self.public_id
|
||||
return utils.cloudinary_url(public_id, **combined_options)
|
||||
|
||||
def build_url(self, **options):
|
||||
return self.__build_url(**options)[0]
|
||||
|
||||
def default_poster_options(self, options):
|
||||
options["format"] = options.get("format", "jpg")
|
||||
|
||||
def default_source_types(self):
|
||||
return ['webm', 'mp4', 'ogv']
|
||||
|
||||
def image(self, **options):
|
||||
if options.get("resource_type", self.resource_type) == "video":
|
||||
self.default_poster_options(options)
|
||||
src, attrs = self.__build_url(**options)
|
||||
client_hints = attrs.pop("client_hints", config().client_hints)
|
||||
responsive = attrs.pop("responsive", False)
|
||||
hidpi = attrs.pop("hidpi", False)
|
||||
if (responsive or hidpi) and not client_hints:
|
||||
attrs["data-src"] = src
|
||||
classes = "cld-responsive" if responsive else "cld-hidpi"
|
||||
if "class" in attrs: classes += " " + attrs["class"]
|
||||
attrs["class"] = classes
|
||||
src = attrs.pop("responsive_placeholder", config().responsive_placeholder)
|
||||
if src == "blank": src = CL_BLANK
|
||||
|
||||
if src: attrs["src"] = src
|
||||
|
||||
return u"<img {0}/>".format(utils.html_attrs(attrs))
|
||||
|
||||
def video_thumbnail(self, **options):
|
||||
self.default_poster_options(options)
|
||||
return self.build_url(**options)
|
||||
|
||||
# Creates an HTML video tag for the provided +source+
|
||||
#
|
||||
# ==== Options
|
||||
# * <tt>source_types</tt> - Specify which source type the tag should include. defaults to webm, mp4 and ogv.
|
||||
# * <tt>source_transformation</tt> - specific transformations to use for a specific source type.
|
||||
# * <tt>poster</tt> - override default thumbnail:
|
||||
# * url: provide an ad hoc url
|
||||
# * options: with specific poster transformations and/or Cloudinary +:public_id+
|
||||
#
|
||||
# ==== Examples
|
||||
# CloudinaryResource("mymovie.mp4").video()
|
||||
# CloudinaryResource("mymovie.mp4").video(source_types = 'webm')
|
||||
# CloudinaryResource("mymovie.ogv").video(poster = "myspecialplaceholder.jpg")
|
||||
# CloudinaryResource("mymovie.webm").video(source_types = ['webm', 'mp4'], poster = {'effect': 'sepia'})
|
||||
def video(self, **options):
|
||||
public_id = options.get('public_id', self.public_id)
|
||||
source = re.sub("\.({0})$".format("|".join(self.default_source_types())), '', public_id)
|
||||
|
||||
source_types = options.pop('source_types', [])
|
||||
source_transformation = options.pop('source_transformation', {})
|
||||
fallback = options.pop('fallback_content', '')
|
||||
options['resource_type'] = options.pop('resource_type', self.resource_type or 'video')
|
||||
|
||||
if not source_types: source_types = self.default_source_types()
|
||||
video_options = options.copy()
|
||||
|
||||
if 'poster' in video_options:
|
||||
poster_options = video_options['poster']
|
||||
if isinstance(poster_options, dict):
|
||||
if 'public_id' in poster_options:
|
||||
video_options['poster'] = utils.cloudinary_url(poster_options['public_id'], **poster_options)[0]
|
||||
else:
|
||||
video_options['poster'] = self.video_thumbnail(public_id=source, **poster_options)
|
||||
else:
|
||||
video_options['poster'] = self.video_thumbnail(public_id=source, **options)
|
||||
|
||||
if not video_options['poster']: del video_options['poster']
|
||||
|
||||
nested_source_types = isinstance(source_types, list) and len(source_types) > 1
|
||||
if not nested_source_types:
|
||||
source = source + '.' + utils.build_array(source_types)[0]
|
||||
|
||||
video_url = utils.cloudinary_url(source, **video_options)
|
||||
video_options = video_url[1]
|
||||
if not nested_source_types:
|
||||
video_options['src'] = video_url[0]
|
||||
if 'html_width' in video_options: video_options['width'] = video_options.pop('html_width')
|
||||
if 'html_height' in video_options: video_options['height'] = video_options.pop('html_height')
|
||||
|
||||
sources = ""
|
||||
if nested_source_types:
|
||||
for source_type in source_types:
|
||||
transformation = options.copy()
|
||||
transformation.update(source_transformation.get(source_type, {}))
|
||||
src = utils.cloudinary_url(source, format=source_type, **transformation)[0]
|
||||
video_type = "ogg" if source_type == 'ogv' else source_type
|
||||
mime_type = "video/" + video_type
|
||||
sources += "<source {attributes}>".format(attributes=utils.html_attrs({'src': src, 'type': mime_type}))
|
||||
|
||||
html = "<video {attributes}>{sources}{fallback}</video>".format(
|
||||
attributes=utils.html_attrs(video_options), sources=sources, fallback=fallback)
|
||||
return html
|
||||
|
||||
|
||||
class CloudinaryImage(CloudinaryResource):
|
||||
def __init__(self, public_id=None, **kwargs):
|
||||
super(CloudinaryImage, self).__init__(public_id=public_id, default_resource_type="image", **kwargs)
|
||||
|
||||
|
||||
class CloudinaryVideo(CloudinaryResource):
|
||||
def __init__(self, public_id=None, **kwargs):
|
||||
super(CloudinaryVideo, self).__init__(public_id=public_id, default_resource_type="video", **kwargs)
|
448
lib/cloudinary/api.py
Normal file
448
lib/cloudinary/api.py
Normal file
@@ -0,0 +1,448 @@
|
||||
# Copyright Cloudinary
|
||||
|
||||
import email.utils
|
||||
import json
|
||||
import socket
|
||||
|
||||
import cloudinary
|
||||
from six import string_types
|
||||
|
||||
import urllib3
|
||||
import certifi
|
||||
|
||||
from cloudinary import utils
|
||||
from urllib3.exceptions import HTTPError
|
||||
|
||||
logger = cloudinary.logger
|
||||
|
||||
# intentionally one-liners
|
||||
class Error(Exception): pass
|
||||
class NotFound(Error): pass
|
||||
class NotAllowed(Error): pass
|
||||
class AlreadyExists(Error): pass
|
||||
class RateLimited(Error): pass
|
||||
class BadRequest(Error): pass
|
||||
class GeneralError(Error): pass
|
||||
class AuthorizationRequired(Error): pass
|
||||
|
||||
|
||||
EXCEPTION_CODES = {
|
||||
400: BadRequest,
|
||||
401: AuthorizationRequired,
|
||||
403: NotAllowed,
|
||||
404: NotFound,
|
||||
409: AlreadyExists,
|
||||
420: RateLimited,
|
||||
500: GeneralError
|
||||
}
|
||||
|
||||
|
||||
class Response(dict):
|
||||
def __init__(self, result, response, **kwargs):
|
||||
super(Response, self).__init__(**kwargs)
|
||||
self.update(result)
|
||||
self.rate_limit_allowed = int(response.headers["x-featureratelimit-limit"])
|
||||
self.rate_limit_reset_at = email.utils.parsedate(response.headers["x-featureratelimit-reset"])
|
||||
self.rate_limit_remaining = int(response.headers["x-featureratelimit-remaining"])
|
||||
|
||||
_http = urllib3.PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def ping(**options):
|
||||
return call_api("get", ["ping"], {}, **options)
|
||||
|
||||
|
||||
def usage(**options):
|
||||
return call_api("get", ["usage"], {}, **options)
|
||||
|
||||
|
||||
def resource_types(**options):
|
||||
return call_api("get", ["resources"], {}, **options)
|
||||
|
||||
|
||||
def resources(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", None)
|
||||
uri = ["resources", resource_type]
|
||||
if upload_type: uri.append(upload_type)
|
||||
params = only(options,
|
||||
"next_cursor", "max_results", "prefix", "tags", "context", "moderations", "direction", "start_at")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_tag(tag, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "tags", tag]
|
||||
params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_moderation(kind, status, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "moderations", kind, status]
|
||||
params = only(options, "next_cursor", "max_results", "tags", "context", "moderations", "direction")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resources_by_ids(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = dict(only(options, "tags", "moderations", "context"), public_ids=public_ids)
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def resource(public_id, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type, public_id]
|
||||
params = only(options, "exif", "faces", "colors", "image_metadata", "pages", "phash", "coordinates", "max_results")
|
||||
return call_api("get", uri, params, **options)
|
||||
|
||||
|
||||
def update(public_id, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type, public_id]
|
||||
params = only(options, "moderation_status", "raw_convert",
|
||||
"quality_override", "ocr",
|
||||
"categorization", "detection", "similarity_search",
|
||||
"background_removal", "notification_url")
|
||||
if "tags" in options:
|
||||
params["tags"] = ",".join(utils.build_array(options["tags"]))
|
||||
if "face_coordinates" in options:
|
||||
params["face_coordinates"] = utils.encode_double_array(options.get("face_coordinates"))
|
||||
if "custom_coordinates" in options:
|
||||
params["custom_coordinates"] = utils.encode_double_array(options.get("custom_coordinates"))
|
||||
if "context" in options:
|
||||
params["context"] = utils.encode_context(options.get("context"))
|
||||
if "auto_tagging" in options:
|
||||
params["auto_tagging"] = str(options.get("auto_tagging"))
|
||||
if "access_control" in options:
|
||||
params["access_control"] = utils.json_encode(utils.build_list_of_dicts(options.get("access_control")))
|
||||
|
||||
return call_api("post", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources(public_ids, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, public_ids=public_ids)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_prefix(prefix, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, prefix=prefix)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_all_resources(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
upload_type = options.pop("type", "upload")
|
||||
uri = ["resources", resource_type, upload_type]
|
||||
params = __delete_resource_params(options, all=True)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_resources_by_tag(tag, **options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["resources", resource_type, "tags", tag]
|
||||
params = __delete_resource_params(options)
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_derived_resources(derived_resource_ids, **options):
|
||||
uri = ["derived_resources"]
|
||||
params = {"derived_resource_ids": derived_resource_ids}
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def delete_derived_by_transformation(public_ids, transformations,
|
||||
resource_type='image', type='upload', invalidate=None,
|
||||
**options):
|
||||
"""
|
||||
Delete derived resources of public ids, identified by transformations
|
||||
|
||||
:param public_ids: the base resources
|
||||
:type public_ids: list of str
|
||||
:param transformations: the transformation of derived resources, optionally including the format
|
||||
:type transformations: list of (dict or str)
|
||||
:param type: The upload type
|
||||
:type type: str
|
||||
:param resource_type: The type of the resource: defaults to "image"
|
||||
:type resource_type: str
|
||||
:param invalidate: (optional) True to invalidate the resources after deletion
|
||||
:type invalidate: bool
|
||||
:return: a list of the public ids for which derived resources were deleted
|
||||
:rtype: dict
|
||||
"""
|
||||
uri = ["resources", resource_type, type]
|
||||
if not isinstance(public_ids, list):
|
||||
public_ids = [public_ids]
|
||||
params = {"public_ids": public_ids,
|
||||
"transformations": utils.build_eager(transformations),
|
||||
"keep_original": True}
|
||||
if invalidate is not None:
|
||||
params['invalidate'] = invalidate
|
||||
return call_api("delete", uri, params, **options)
|
||||
|
||||
|
||||
def tags(**options):
|
||||
resource_type = options.pop("resource_type", "image")
|
||||
uri = ["tags", resource_type]
|
||||
return call_api("get", uri, only(options, "next_cursor", "max_results", "prefix"), **options)
|
||||
|
||||
|
||||
def transformations(**options):
    """List all transformations, paged via next_cursor/max_results."""
    query = only(options, "next_cursor", "max_results")
    return call_api("get", ["transformations"], query, **options)
|
||||
|
||||
|
||||
def transformation(transformation, **options):
    """Get details of a single transformation (dict or string form)."""
    path = ["transformations", transformation_string(transformation)]
    query = only(options, "next_cursor", "max_results")
    return call_api("get", path, query, **options)
|
||||
|
||||
|
||||
def delete_transformation(transformation, **options):
    """Delete a transformation (dict or string form)."""
    path = ["transformations", transformation_string(transformation)]
    return call_api("delete", path, {}, **options)
|
||||
|
||||
|
||||
# updates - currently only supported update is the "allowed_for_strict" boolean flag and unsafe_update
def update_transformation(transformation, **options):
    """Update a transformation's allowed_for_strict flag and/or apply an unsafe_update."""
    path = ["transformations", transformation_string(transformation)]
    updates = only(options, "allowed_for_strict")
    if "unsafe_update" in options:
        updates["unsafe_update"] = transformation_string(options.get("unsafe_update"))
    if not updates:
        raise Exception("No updates given")
    return call_api("put", path, updates, **options)
|
||||
|
||||
|
||||
def create_transformation(name, definition, **options):
    """Create a named transformation from *definition* (dict or string)."""
    payload = {"transformation": transformation_string(definition)}
    return call_api("post", ["transformations", name], payload, **options)
|
||||
|
||||
|
||||
def publish_by_ids(public_ids, **options):
    """Publish the resources with the given public ids."""
    kind = options.pop("resource_type", "image")
    payload = dict(only(options, "type", "overwrite", "invalidate"), public_ids=public_ids)
    return call_api("post", ["resources", kind, "publish_resources"], payload, **options)
|
||||
|
||||
|
||||
def publish_by_prefix(prefix, **options):
    """Publish all resources whose public id starts with *prefix*."""
    kind = options.pop("resource_type", "image")
    payload = dict(only(options, "type", "overwrite", "invalidate"), prefix=prefix)
    return call_api("post", ["resources", kind, "publish_resources"], payload, **options)
|
||||
|
||||
|
||||
def publish_by_tag(tag, **options):
    """Publish all resources carrying *tag*."""
    kind = options.pop("resource_type", "image")
    payload = dict(only(options, "type", "overwrite", "invalidate"), tag=tag)
    return call_api("post", ["resources", kind, "publish_resources"], payload, **options)
|
||||
|
||||
|
||||
def upload_presets(**options):
    """List upload presets, paged via next_cursor/max_results."""
    query = only(options, "next_cursor", "max_results")
    return call_api("get", ["upload_presets"], query, **options)
|
||||
|
||||
|
||||
def upload_preset(name, **options):
    """Get details of the upload preset *name*."""
    query = only(options, "max_results")
    return call_api("get", ["upload_presets", name], query, **options)
|
||||
|
||||
|
||||
def delete_upload_preset(name, **options):
    """Delete the upload preset *name*."""
    return call_api("delete", ["upload_presets", name], {}, **options)
|
||||
|
||||
|
||||
def update_upload_preset(name, **options):
    """Update the upload preset *name* with the given upload options."""
    payload = utils.cleanup_params(utils.build_upload_params(**options))
    payload.update(only(options, "unsigned", "disallow_public_id"))
    return call_api("put", ["upload_presets", name], payload, **options)
|
||||
|
||||
|
||||
def create_upload_preset(**options):
    """Create an upload preset from the given upload options."""
    payload = utils.cleanup_params(utils.build_upload_params(**options))
    payload.update(only(options, "unsigned", "disallow_public_id", "name"))
    return call_api("post", ["upload_presets"], payload, **options)
|
||||
|
||||
|
||||
def root_folders(**options):
    """List the top-level folders."""
    path = ["folders"]
    return call_api("get", path, {}, **options)
|
||||
|
||||
|
||||
def subfolders(of_folder_path, **options):
    """List the direct subfolders of *of_folder_path*."""
    path = ["folders", of_folder_path]
    return call_api("get", path, {}, **options)
|
||||
|
||||
|
||||
def restore(public_ids, **options):
    """Restore deleted (backed-up) resources identified by *public_ids*."""
    kind = options.pop("resource_type", "image")
    upload_type = options.pop("type", "upload")
    path = ["resources", kind, upload_type, "restore"]
    return call_api("post", path, {"public_ids": public_ids}, **options)
|
||||
|
||||
|
||||
def upload_mappings(**options):
    """List upload mappings, paged via next_cursor/max_results."""
    query = only(options, "next_cursor", "max_results")
    return call_api("get", ["upload_mappings"], query, **options)
|
||||
|
||||
|
||||
def upload_mapping(name, **options):
    """Get details of the upload mapping for folder *name*."""
    return call_api("get", ["upload_mappings"], {"folder": name}, **options)
|
||||
|
||||
|
||||
def delete_upload_mapping(name, **options):
    """Delete the upload mapping for folder *name*."""
    return call_api("delete", ["upload_mappings"], {"folder": name}, **options)
|
||||
|
||||
|
||||
def update_upload_mapping(name, **options):
    """Update the URL template of the upload mapping for folder *name*."""
    payload = {"folder": name}
    payload.update(only(options, "template"))
    return call_api("put", ["upload_mappings"], payload, **options)
|
||||
|
||||
|
||||
def create_upload_mapping(name, **options):
    """Create an upload mapping from folder *name* to a URL template."""
    payload = {"folder": name}
    payload.update(only(options, "template"))
    return call_api("post", ["upload_mappings"], payload, **options)
|
||||
|
||||
|
||||
def list_streaming_profiles(**options):
    """List all streaming profiles (built-in and custom)."""
    path = ["streaming_profiles"]
    return call_api('GET', path, {}, **options)
|
||||
|
||||
|
||||
def get_streaming_profile(name, **options):
    """Get details of the streaming profile *name*."""
    path = ["streaming_profiles", name]
    return call_api('GET', path, {}, **options)
|
||||
|
||||
|
||||
def delete_streaming_profile(name, **options):
    """Delete (or revert, for built-ins) the streaming profile *name*."""
    path = ["streaming_profiles", name]
    return call_api('DELETE', path, {}, **options)
|
||||
|
||||
|
||||
def create_streaming_profile(name, **options):
    """Create streaming profile *name* from display_name/representations options."""
    payload = __prepare_streaming_profile_params(**options)
    payload["name"] = name
    return call_api('POST', ["streaming_profiles"], payload, **options)
|
||||
|
||||
|
||||
def update_streaming_profile(name, **options):
    """Update streaming profile *name* from display_name/representations options."""
    payload = __prepare_streaming_profile_params(**options)
    return call_api('PUT', ["streaming_profiles", name], payload, **options)
|
||||
|
||||
|
||||
def call_json_api(method, uri, jsonBody, **options):
    """Dispatch an Admin API request with *jsonBody* JSON-encoded as the body."""
    logger.debug(jsonBody)
    encoded = json.dumps(jsonBody).encode('utf-8')
    return _call_api(method, uri, body=encoded, headers={'Content-Type': 'application/json'}, **options)
|
||||
|
||||
|
||||
def call_api(method, uri, params, **options):
    """Dispatch an Admin API request with form/query *params*."""
    return _call_api(method, uri, params=params, **options)
|
||||
|
||||
|
||||
def _call_api(method, uri, params=None, body=None, headers=None, **options):
    """Low-level Admin API transport.

    Builds the request URL from configuration, flattens list parameters,
    adds Basic authentication and dispatches the HTTP request.

    :param method: HTTP verb, case-insensitive (e.g. "get", "POST")
    :param uri: list of URL path components appended after the cloud name
    :param params: optional dict of request parameters; list values are
        expanded to indexed "key[i]" entries, falsy values are dropped
    :param body: optional pre-encoded request body (used instead of params)
    :param headers: optional dict of extra request headers
    :param options: configuration overrides (upload_prefix, cloud_name,
        api_key, api_secret, timeout)
    :return: Response wrapping the parsed JSON result
    :raises GeneralError: on transport errors or an unparsable response
    :raises Exception: when credentials are missing, or a subclass mapped
        from the HTTP status via EXCEPTION_CODES when the API reports an error
    """
    prefix = options.pop("upload_prefix",
                         cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name)
    if not cloud_name:
        raise Exception("Must supply cloud_name")
    api_key = options.pop("api_key", cloudinary.config().api_key)
    if not api_key:
        raise Exception("Must supply api_key")
    api_secret = options.pop("api_secret", cloudinary.config().api_secret)
    # BUG FIX: this previously re-checked cloud_name, so a missing api_secret
    # slipped through to the HTTP layer and failed with an opaque auth error.
    if not api_secret:
        raise Exception("Must supply api_secret")
    api_url = "/".join([prefix, "v1_1", cloud_name] + uri)

    processed_params = None
    if isinstance(params, dict):
        processed_params = {}
        for key, value in params.items():
            if isinstance(value, list):
                # Expand lists to "key[0]", "key[1]", ... entries.
                value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)}
                processed_params.update(value_list)
            elif value:
                processed_params[key] = value

    # Add authentication
    req_headers = urllib3.make_headers(
        basic_auth="{0}:{1}".format(api_key, api_secret),
        user_agent=cloudinary.get_user_agent()
    )
    if headers is not None:
        req_headers.update(headers)
    kw = {}
    if 'timeout' in options:
        kw['timeout'] = options['timeout']
    if body is not None:
        kw['body'] = body
    try:
        response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw)
        body = response.data
    except HTTPError as e:
        # BUG FIX: `e.message` does not exist on Python 3 and the "{0}"
        # placeholder was never formatted (it was passed as a second argument).
        raise GeneralError("Unexpected error {0}".format(e))
    except socket.error as e:
        raise GeneralError("Socket Error: %s" % (str(e)))

    try:
        result = json.loads(body.decode('utf-8'))
    except Exception as e:
        # The server response was not valid JSON.
        raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e))

    if "error" in result:
        # Map the HTTP status to a specific exception class when known.
        exception_class = EXCEPTION_CODES.get(response.status) or Exception
        raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"]))

    return Response(result, response)
|
||||
|
||||
|
||||
def only(source, *keys):
    """Return a dict containing just the given *keys* that are present in *source*."""
    picked = {}
    for key in keys:
        if key in source:
            picked[key] = source[key]
    return picked
|
||||
|
||||
|
||||
def transformation_string(transformation):
    """Return *transformation* as a URL string, serializing dict form via utils."""
    if not isinstance(transformation, string_types):
        transformation = cloudinary.utils.generate_transformation_string(**transformation)[0]
    return transformation
|
||||
|
||||
|
||||
def __prepare_streaming_profile_params(**options):
    """Build the streaming-profile request params from display_name/representations."""
    params = only(options, "display_name")
    if "representations" in options:
        # Each representation is reduced to its transformation string and the
        # whole list is sent as a single JSON-encoded field.
        reps = [{"transformation": transformation_string(t)}
                for t in options["representations"]]
        params["representations"] = json.dumps(reps)
    return params
|
||||
|
||||
def __delete_resource_params(options, **params):
    """Merge deletion-related *options* with explicit *params* overrides."""
    merged = dict(transformations=utils.build_eager(options.get('transformations')),
                  **only(options, "keep_original", "next_cursor", "invalidate"))
    merged.update(params)
    return merged
|
47
lib/cloudinary/auth_token.py
Normal file
47
lib/cloudinary/auth_token.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import hashlib
|
||||
import hmac
|
||||
import re
|
||||
import time
|
||||
from binascii import a2b_hex
|
||||
from cloudinary.compat import quote_plus
|
||||
|
||||
AUTH_TOKEN_NAME = "__cld_token__"
|
||||
|
||||
|
||||
|
||||
def generate(url=None, acl=None, start_time=None, duration=None, expiration=None, ip=None, key=None,
             token_name=AUTH_TOKEN_NAME):
    """Generate a Cloudinary authentication token query-string.

    Either *expiration* (absolute unix time) or *duration* (seconds from
    *start_time*, defaulting to now) must be given. The returned value is
    "<token_name>=<parts>~hmac=<digest>" where parts may include ip, st,
    exp and acl, signed with the hex *key* via HMAC-SHA256.
    """
    if expiration is None:
        if duration is None:
            raise Exception("Must provide either expiration or duration")
        start = start_time if start_time is not None else int(time.mktime(time.gmtime()))
        expiration = start + duration

    token_parts = []
    if ip is not None:
        token_parts.append("ip=" + ip)
    if start_time is not None:
        token_parts.append("st=%d" % start_time)
    token_parts.append("exp=%d" % expiration)
    if acl is not None:
        token_parts.append("acl=%s" % _escape_to_lower(acl))
    # The url participates in the signature but is not embedded in the token.
    to_sign = list(token_parts)
    if url is not None:
        to_sign.append("url=%s" % _escape_to_lower(url))
    auth = _digest("~".join(to_sign), key)
    token_parts.append("hmac=%s" % auth)
    return "%(token_name)s=%(token)s" % {"token_name": token_name, "token": "~".join(token_parts)}
|
||||
|
||||
|
||||
def _digest(message, key):
|
||||
bin_key = a2b_hex(key)
|
||||
return hmac.new(bin_key, message.encode('utf-8'), hashlib.sha256).hexdigest()
|
||||
|
||||
|
||||
def _escape_to_lower(url):
|
||||
escaped_url = quote_plus(url)
|
||||
|
||||
def toLowercase(match):
|
||||
return match.group(0).lower()
|
||||
|
||||
escaped_url = re.sub(r'%..', toLowercase, escaped_url)
|
||||
return escaped_url
|
34
lib/cloudinary/compat.py
Normal file
34
lib/cloudinary/compat.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# Copyright Cloudinary
# Python 2/3 compatibility layer: re-exports urllib/http names through six
# under stable aliases and provides uniform byte/string conversion helpers.
import six.moves.urllib.parse
urlencode = six.moves.urllib.parse.urlencode
unquote = six.moves.urllib.parse.unquote
urlparse = six.moves.urllib.parse.urlparse
parse_qs = six.moves.urllib.parse.parse_qs
parse_qsl = six.moves.urllib.parse.parse_qsl
quote_plus = six.moves.urllib.parse.quote_plus
httplib = six.moves.http_client
from six import PY3, string_types, StringIO, BytesIO
urllib2 = six.moves.urllib.request
NotConnected = six.moves.http_client.NotConnected

if PY3:
    # Python 3: text and bytes are distinct types; convert through utf8.
    to_bytes = lambda s: s.encode('utf8')
    to_bytearray = lambda s: bytearray(s, 'utf8')
    to_string = lambda b: b.decode('utf8')

else:
    # Python 2: str is already a byte string, so plain str() suffices.
    to_bytes = str
    to_bytearray = str
    to_string = str

try:
    # Python 2: use the lazy xrange.
    cldrange = xrange
except NameError:
    # Python 3: xrange is gone; range is already lazy.
    def cldrange(*args, **kwargs):
        return iter(range(*args, **kwargs))

try:
    # Python 2.6+/3: the next() builtin.
    advance_iterator = next
except NameError:
    # Pre-2.6 fallback: call the iterator's next() method directly.
    def advance_iterator(it):
        return it.next()
|
134
lib/cloudinary/forms.py
Normal file
134
lib/cloudinary/forms.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from django import forms
|
||||
from cloudinary import CloudinaryResource
|
||||
import cloudinary.uploader
|
||||
import cloudinary.utils
|
||||
import re
|
||||
import json
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
def cl_init_js_callbacks(form, request):
    """Enable the upload CORS-callback URL on every Cloudinary JS field in *form*."""
    js_fields = (f for f in form.fields.values() if isinstance(f, CloudinaryJsFileField))
    for field in js_fields:
        field.enable_callback(request)
|
||||
|
||||
|
||||
class CloudinaryInput(forms.TextInput):
    """Django widget that renders a file input wired for direct browser upload
    to Cloudinary (consumed by Cloudinary's jQuery upload plugin)."""
    input_type = 'file'

    def render(self, name, value, attrs=None):
        """Render the file input plus, when a value exists, a hidden field
        carrying the current (presigned) value.

        Upload parameters from attrs['options'] are signed — or only cleaned,
        for unsigned presets — and embedded as data-* attributes.
        """
        attrs = self.build_attrs(attrs)
        options = attrs.get('options', {})
        # 'options' is consumed here; blank it so it is not rendered as an
        # HTML attribute.
        attrs["options"] = ''

        params = cloudinary.utils.build_upload_params(**options)
        if options.get("unsigned"):
            params = cloudinary.utils.cleanup_params(params)
        else:
            params = cloudinary.utils.sign_request(params, options)

        if 'resource_type' not in options: options['resource_type'] = 'auto'
        cloudinary_upload_url = cloudinary.utils.cloudinary_api_url("upload", **options)

        attrs["data-url"] = cloudinary_upload_url
        attrs["data-form-data"] = json.dumps(params)
        attrs["data-cloudinary-field"] = name
        chunk_size = options.get("chunk_size", None)
        if chunk_size: attrs["data-max-chunk-size"] = chunk_size
        attrs["class"] = " ".join(["cloudinary-fileupload", attrs.get("class", "")])

        # The visible input is rendered under the fixed name "file"; the real
        # field name travels in data-cloudinary-field and the hidden input.
        widget = super(CloudinaryInput, self).render("file", None, attrs=attrs)
        if value:
            if isinstance(value, CloudinaryResource):
                value_string = value.get_presigned()
            else:
                value_string = value
            widget += forms.HiddenInput().render(name, value_string)
        return widget
|
||||
|
||||
|
||||
class CloudinaryJsFileField(forms.Field):
    """Form field whose widget uploads directly to Cloudinary from the browser.

    The submitted value is the string produced by the JS widget, in the form
    "<resource_type>/<type>/v<version>/<public_id>.<format>#<signature>";
    it is parsed back into a CloudinaryResource and signature-checked.
    """
    default_error_messages = {
        'required': _(u"No file selected!")
    }

    def __init__(self, attrs=None, options=None, autosave=True, *args, **kwargs):
        # Default attrs/options to fresh dicts and copy them, so instances
        # never share mutable state.
        if attrs is None: attrs = {}
        if options is None: options = {}
        self.autosave = autosave
        attrs = attrs.copy()
        attrs["options"] = options.copy()

        field_options = {'widget': CloudinaryInput(attrs=attrs)}
        field_options.update(kwargs)
        super(CloudinaryJsFileField, self).__init__(*args, **field_options)

    def enable_callback(self, request):
        """Point the widget's CORS callback at this site's static helper page."""
        from django.contrib.staticfiles.storage import staticfiles_storage
        self.widget.attrs["options"]["callback"] = request.build_absolute_uri(
            staticfiles_storage.url("html/cloudinary_cors.html"))

    def to_python(self, value):
        """Convert to CloudinaryResource"""
        if not value: return None
        # "<resource_type>/<type>/v<version>/<filename>#<signature>"
        m = re.search(r'^([^/]+)/([^/]+)/v(\d+)/([^#]+)#([^/]+)$', value)
        if not m:
            raise forms.ValidationError("Invalid format")
        resource_type = m.group(1)
        upload_type = m.group(2)
        version = m.group(3)
        filename = m.group(4)
        signature = m.group(5)
        # Split "<public_id>.<format>" at the last dot.
        m = re.search(r'(.*)\.(.*)', filename)
        if not m:
            raise forms.ValidationError("Invalid file name")
        public_id = m.group(1)
        image_format = m.group(2)
        return CloudinaryResource(public_id,
                                  format=image_format,
                                  version=version,
                                  signature=signature,
                                  type=upload_type,
                                  resource_type=resource_type)

    def validate(self, value):
        """Validate the signature"""
        # Use the parent's handling of required fields, etc.
        super(CloudinaryJsFileField, self).validate(value)
        if not value: return
        if not value.validate():
            raise forms.ValidationError("Signature mismatch")
|
||||
|
||||
|
||||
class CloudinaryUnsignedJsFileField(CloudinaryJsFileField):
    """Browser-upload field using an unsigned upload preset instead of signing."""
    def __init__(self, upload_preset, attrs=None, options=None, autosave=True, *args, **kwargs):
        if attrs is None:
            attrs = {}
        if options is None:
            options = {}
        options = options.copy()
        # Force unsigned mode and pin the preset; everything else is inherited.
        options.update({"unsigned": True, "upload_preset": upload_preset})
        super(CloudinaryUnsignedJsFileField, self).__init__(attrs, options, autosave, *args, **kwargs)
|
||||
|
||||
|
||||
class CloudinaryFileField(forms.FileField):
    """Standard server-side FileField that uploads the file to Cloudinary
    during form cleaning (when autosave is enabled)."""
    my_default_error_messages = {
        'required': _(u"No file selected!")
    }
    default_error_messages = forms.FileField.default_error_messages.copy()
    default_error_messages.update(my_default_error_messages)

    def __init__(self, options=None, autosave=True, *args, **kwargs):
        # autosave: upload inside to_python; options: extra upload parameters.
        self.autosave = autosave
        self.options = options or {}
        super(CloudinaryFileField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        """Upload and convert to CloudinaryResource"""
        value = super(CloudinaryFileField, self).to_python(value)
        if not value:
            return None
        if self.autosave:
            return cloudinary.uploader.upload_image(value, **self.options)
        else:
            return value
|
121
lib/cloudinary/models.py
Normal file
121
lib/cloudinary/models.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import re
|
||||
|
||||
|
||||
from cloudinary import CloudinaryResource, forms, uploader
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.db import models
|
||||
|
||||
# Add introspection rules for South, if it's installed.
|
||||
try:
|
||||
from south.modelsinspector import add_introspection_rules
|
||||
add_introspection_rules([], ["^cloudinary.models.CloudinaryField"])
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
CLOUDINARY_FIELD_DB_RE = r'(?:(?P<resource_type>image|raw|video)/(?P<type>upload|private|authenticated)/)?(?:v(?P<version>\d+)/)?(?P<public_id>.*?)(\.(?P<format>[^.]+))?$'
|
||||
|
||||
|
||||
# Taken from six - https://pythonhosted.org/six/
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            # When the temporary class is subclassed, rebuild the subclass
            # with the real metaclass and the originally intended bases.
            return meta(name, bases, d)
    # Instantiate the dummy metaclass directly to get the temporary base class.
    return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
class CloudinaryField(models.Field):
    """Django model field storing a Cloudinary resource as a CharField-backed
    string of the form "[resource_type/type/]v<version>/<public_id>[.<format>]"
    (see CLOUDINARY_FIELD_DB_RE)."""
    description = "A resource stored in Cloudinary"

    def __init__(self, *args, **kwargs):
        # Cloudinary-specific kwargs are popped before delegating to
        # models.Field; the remainder behave like CharField options.
        options = {'max_length': 255}
        self.default_form_class = kwargs.pop("default_form_class", forms.CloudinaryFileField)
        options.update(kwargs)
        self.type = options.pop("type", "upload")
        self.resource_type = options.pop("resource_type", "image")
        # Optional names of sibling model fields to populate with the
        # uploaded image's dimensions.
        self.width_field = options.pop("width_field", None)
        self.height_field = options.pop("height_field", None)
        super(CloudinaryField, self).__init__(*args, **options)

    def get_internal_type(self):
        # Stored in the DB as a plain varchar.
        return 'CharField'

    def value_to_string(self, obj):
        """Serialize the field value of *obj* to its string DB representation."""
        # We need to support both legacy `_get_val_from_obj` and new `value_from_object` models.Field methods.
        # It would be better to wrap it with try -> except AttributeError -> fallback to legacy.
        # Unfortunately, we can catch AttributeError exception from `value_from_object` function itself.
        # Parsing exception string is an overkill here, that's why we check for attribute existence

        if hasattr(self, 'value_from_object'):
            value = self.value_from_object(obj)
        else:  # fallback for legacy django versions
            value = self._get_val_from_obj(obj)

        return self.get_prep_value(value)

    def parse_cloudinary_resource(self, value):
        """Parse a stored string back into a CloudinaryResource, falling back
        to the field's configured resource_type/type when absent."""
        m = re.match(CLOUDINARY_FIELD_DB_RE, value)
        resource_type = m.group('resource_type') or self.resource_type
        upload_type = m.group('type') or self.type
        return CloudinaryResource(
            type=upload_type,
            resource_type=resource_type,
            version=m.group('version'),
            public_id=m.group('public_id'),
            format=m.group('format')
        )

    def from_db_value(self, value, expression, connection, context):
        """Convert the raw DB string to a CloudinaryResource (None passes through)."""
        if value is None:
            return value
        return self.parse_cloudinary_resource(value)

    def to_python(self, value):
        """Coerce assigned values: resources and uploaded files pass through,
        strings are parsed into CloudinaryResource."""
        if isinstance(value, CloudinaryResource):
            return value
        elif isinstance(value, UploadedFile):
            # Uploaded but not yet sent to Cloudinary; handled in pre_save.
            return value
        elif value is None:
            return value
        else:
            return self.parse_cloudinary_resource(value)

    def upload_options_with_filename(self, model_instance, filename):
        # Hook for subclasses that need per-file upload options.
        return self.upload_options(model_instance)

    def upload_options(self, model_instance):
        # Hook for subclasses: extra options passed to the upload call.
        return {}

    def pre_save(self, model_instance, add):
        """Upload a pending UploadedFile to Cloudinary before saving, updating
        the instance attribute and optional width/height fields."""
        value = super(CloudinaryField, self).pre_save(model_instance, add)
        if isinstance(value, UploadedFile):
            options = {"type": self.type, "resource_type": self.resource_type}
            options.update(self.upload_options_with_filename(model_instance, value.name))
            instance_value = uploader.upload_resource(value, **options)
            setattr(model_instance, self.attname, instance_value)
            if self.width_field:
                setattr(model_instance, self.width_field, instance_value.metadata['width'])
            if self.height_field:
                setattr(model_instance, self.height_field, instance_value.metadata['height'])
            return self.get_prep_value(instance_value)
        else:
            return value

    def get_prep_value(self, value):
        """Convert a CloudinaryResource (or passthrough string) to its DB string."""
        if not value:
            return self.get_default()
        if isinstance(value, CloudinaryResource):
            return value.get_prep_value()
        else:
            return value

    def formfield(self, **kwargs):
        """Build the bound form field (default_form_class) with this field's
        Cloudinary options; autosave is off since pre_save handles the upload."""
        options = {"type": self.type, "resource_type": self.resource_type}
        options.update(kwargs.pop('options', {}))
        defaults = {'form_class': self.default_form_class, 'options': options, 'autosave': False}
        defaults.update(kwargs)
        return super(CloudinaryField, self).formfield(**defaults)
|
34
lib/cloudinary/poster/__init__.py
Normal file
34
lib/cloudinary/poster/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
#
|
||||
# Copyright (c) 2011 Chris AtLee
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""poster module
|
||||
|
||||
Support for streaming HTTP uploads, and multipart/form-data encoding
|
||||
|
||||
```poster.version``` is a 3-tuple of integers representing the version number.
|
||||
New releases of poster will always have a version number that compares greater
|
||||
than an older version of poster.
|
||||
New in version 0.6."""
|
||||
|
||||
import cloudinary.poster.streaminghttp
|
||||
import cloudinary.poster.encode
|
||||
|
||||
version = (0, 8, 2) # Thanks JP!
|
447
lib/cloudinary/poster/encode.py
Normal file
447
lib/cloudinary/poster/encode.py
Normal file
@@ -0,0 +1,447 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""multipart/form-data encoding module
|
||||
|
||||
This module provides functions that faciliate encoding name/value pairs
|
||||
as multipart/form-data suitable for a HTTP POST or PUT request.
|
||||
|
||||
multipart/form-data is the standard way to upload files over HTTP"""
|
||||
|
||||
__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
|
||||
'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
|
||||
'multipart_encode']
|
||||
|
||||
try:
|
||||
from io import UnsupportedOperation
|
||||
except ImportError:
|
||||
UnsupportedOperation = None
|
||||
|
||||
try:
    import uuid
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        return uuid.uuid4().hex
except ImportError:
    # Very old Pythons without uuid: fall back to SHA-1 of 160 random bits.
    import random, sha
    def gen_boundary():
        """Returns a random string to use as the boundary for a message"""
        bits = random.getrandbits(160)
        return sha.new(str(bits)).hexdigest()
|
||||
|
||||
import re, os, mimetypes
|
||||
from cloudinary.compat import (PY3, string_types, to_bytes, to_string,
|
||||
to_bytearray, quote_plus, advance_iterator)
|
||||
try:
|
||||
from email.header import Header
|
||||
except ImportError:
|
||||
# Python 2.4
|
||||
from email.Header import Header
|
||||
|
||||
if PY3:
    def encode_and_quote(data):
        """Return quote_plus(data) after UTF-8 encoding; None passes through."""
        if data is None:
            return None
        return quote_plus(to_bytes(data))

else:
    def encode_and_quote(data):
        """If ``data`` is unicode, return quote_plus(data.encode("utf-8")) otherwise return quote_plus(data)"""
        if data is None:
            return None

        if isinstance(data, unicode):
            data = data.encode("utf-8")
        return quote_plus(data)
|
||||
|
||||
if PY3:
    def _strify(s):
        """Return *s* as bytes: None passes through, bytes are returned as-is,
        anything else is UTF-8 encoded (via str() when not string-like)."""
        if s is None:
            return None
        elif isinstance(s, bytes):
            return s
        else:
            try:
                return to_bytes(s)
            except AttributeError:
                # No .encode (e.g. int): stringify first, then encode.
                return to_bytes(str(s))
else:
    def _strify(s):
        """If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None"""
        if s is None:
            return None
        if isinstance(s, unicode):
            return s.encode("utf-8")
        return str(s)
|
||||
|
||||
class MultipartParam(object):
|
||||
"""Represents a single parameter in a multipart/form-data request
|
||||
|
||||
``name`` is the name of this parameter.
|
||||
|
||||
If ``value`` is set, it must be a string or unicode object to use as the
|
||||
data for this parameter.
|
||||
|
||||
If ``filename`` is set, it is what to say that this parameter's filename
|
||||
is. Note that this does not have to be the actual filename any local file.
|
||||
|
||||
If ``filetype`` is set, it is used as the Content-Type for this parameter.
|
||||
If unset it defaults to "text/plain; charset=utf8"
|
||||
|
||||
If ``filesize`` is set, it specifies the length of the file ``fileobj``
|
||||
|
||||
If ``fileobj`` is set, it must be a file-like object that supports
|
||||
.read().
|
||||
|
||||
Both ``value`` and ``fileobj`` must not be set, doing so will
|
||||
raise a ValueError assertion.
|
||||
|
||||
If ``fileobj`` is set, and ``filesize`` is not specified, then
|
||||
the file's size will be determined first by stat'ing ``fileobj``'s
|
||||
file descriptor, and if that fails, by seeking to the end of the file,
|
||||
recording the current position as the size, and then by seeking back to the
|
||||
beginning of the file.
|
||||
|
||||
``cb`` is a callable which will be called from iter_encode with (self,
|
||||
current, total), representing the current parameter, current amount
|
||||
transferred, and the total size.
|
||||
"""
|
||||
def __init__(self, name, value=None, filename=None, filetype=None,
|
||||
filesize=None, fileobj=None, cb=None):
|
||||
self.name = Header(name).encode()
|
||||
self.value = _strify(value)
|
||||
if filename is None:
|
||||
self.filename = None
|
||||
else:
|
||||
if PY3:
|
||||
byte_filename = filename.encode("ascii", "xmlcharrefreplace")
|
||||
self.filename = to_string(byte_filename)
|
||||
encoding = 'unicode_escape'
|
||||
else:
|
||||
if isinstance(filename, unicode):
|
||||
# Encode with XML entities
|
||||
self.filename = filename.encode("ascii", "xmlcharrefreplace")
|
||||
else:
|
||||
self.filename = str(filename)
|
||||
encoding = 'string_escape'
|
||||
self.filename = self.filename.encode(encoding).replace(to_bytes('"'), to_bytes('\\"'))
|
||||
self.filetype = _strify(filetype)
|
||||
|
||||
self.filesize = filesize
|
||||
self.fileobj = fileobj
|
||||
self.cb = cb
|
||||
|
||||
if self.value is not None and self.fileobj is not None:
|
||||
raise ValueError("Only one of value or fileobj may be specified")
|
||||
|
||||
if fileobj is not None and filesize is None:
|
||||
# Try and determine the file size
|
||||
try:
|
||||
self.filesize = os.fstat(fileobj.fileno()).st_size
|
||||
except (OSError, AttributeError, UnsupportedOperation):
|
||||
try:
|
||||
fileobj.seek(0, 2)
|
||||
self.filesize = fileobj.tell()
|
||||
fileobj.seek(0)
|
||||
except:
|
||||
raise ValueError("Could not determine filesize")
|
||||
|
||||
def __cmp__(self, other):
|
||||
attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
|
||||
myattrs = [getattr(self, a) for a in attrs]
|
||||
oattrs = [getattr(other, a) for a in attrs]
|
||||
return cmp(myattrs, oattrs)
|
||||
|
||||
def reset(self):
|
||||
if self.fileobj is not None:
|
||||
self.fileobj.seek(0)
|
||||
elif self.value is None:
|
||||
raise ValueError("Don't know how to reset this parameter")
|
||||
|
||||
@classmethod
|
||||
def from_file(cls, paramname, filename):
|
||||
"""Returns a new MultipartParam object constructed from the local
|
||||
file at ``filename``.
|
||||
|
||||
``filesize`` is determined by os.path.getsize(``filename``)
|
||||
|
||||
``filetype`` is determined by mimetypes.guess_type(``filename``)[0]
|
||||
|
||||
``filename`` is set to os.path.basename(``filename``)
|
||||
"""
|
||||
|
||||
return cls(paramname, filename=os.path.basename(filename),
|
||||
filetype=mimetypes.guess_type(filename)[0],
|
||||
filesize=os.path.getsize(filename),
|
||||
fileobj=open(filename, "rb"))
|
||||
|
||||
@classmethod
|
||||
def from_params(cls, params):
|
||||
"""Returns a list of MultipartParam objects from a sequence of
|
||||
name, value pairs, MultipartParam instances,
|
||||
or from a mapping of names to values
|
||||
|
||||
The values may be strings or file objects, or MultipartParam objects.
|
||||
MultipartParam object names must match the given names in the
|
||||
name,value pairs or mapping, if applicable."""
|
||||
if hasattr(params, 'items'):
|
||||
params = params.items()
|
||||
|
||||
retval = []
|
||||
for item in params:
|
||||
if isinstance(item, cls):
|
||||
retval.append(item)
|
||||
continue
|
||||
name, value = item
|
||||
if isinstance(value, cls):
|
||||
assert value.name == name
|
||||
retval.append(value)
|
||||
continue
|
||||
if hasattr(value, 'read'):
|
||||
# Looks like a file object
|
||||
filename = getattr(value, 'name', None)
|
||||
if filename is not None:
|
||||
filetype = mimetypes.guess_type(filename)[0]
|
||||
else:
|
||||
filetype = None
|
||||
|
||||
retval.append(cls(name=name, filename=filename,
|
||||
filetype=filetype, fileobj=value))
|
||||
else:
|
||||
retval.append(cls(name, value))
|
||||
return retval
|
||||
|
||||
def encode_hdr(self, boundary):
|
||||
"""Returns the header of the encoding of this parameter"""
|
||||
boundary = encode_and_quote(boundary)
|
||||
|
||||
headers = ["--%s" % boundary]
|
||||
|
||||
if self.filename:
|
||||
disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
|
||||
to_string(self.filename))
|
||||
else:
|
||||
disposition = 'form-data; name="%s"' % self.name
|
||||
|
||||
headers.append("Content-Disposition: %s" % disposition)
|
||||
|
||||
if self.filetype:
|
||||
filetype = to_string(self.filetype)
|
||||
else:
|
||||
filetype = "text/plain; charset=utf-8"
|
||||
|
||||
headers.append("Content-Type: %s" % filetype)
|
||||
|
||||
headers.append("")
|
||||
headers.append("")
|
||||
|
||||
return "\r\n".join(headers)
|
||||
|
||||
def encode(self, boundary):
|
||||
"""Returns the string encoding of this parameter"""
|
||||
if self.value is None:
|
||||
value = self.fileobj.read()
|
||||
else:
|
||||
value = self.value
|
||||
|
||||
if re.search(to_bytes("^--%s$" % re.escape(boundary)), value, re.M):
|
||||
raise ValueError("boundary found in encoded string")
|
||||
|
||||
return to_bytes(self.encode_hdr(boundary)) + value + b"\r\n"
|
||||
|
||||
def iter_encode(self, boundary, blocksize=4096):
|
||||
"""Yields the encoding of this parameter
|
||||
If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
|
||||
yielded."""
|
||||
total = self.get_size(boundary)
|
||||
current = 0
|
||||
if self.value is not None:
|
||||
block = self.encode(boundary)
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
else:
|
||||
block = to_bytes(self.encode_hdr(boundary))
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
last_block = to_bytearray("")
|
||||
encoded_boundary = "--%s" % encode_and_quote(boundary)
|
||||
boundary_exp = re.compile(to_bytes("^%s$" % re.escape(encoded_boundary)),
|
||||
re.M)
|
||||
while True:
|
||||
block = self.fileobj.read(blocksize)
|
||||
if not block:
|
||||
current += 2
|
||||
yield to_bytes("\r\n")
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
break
|
||||
last_block += block
|
||||
if boundary_exp.search(last_block):
|
||||
raise ValueError("boundary found in file data")
|
||||
last_block = last_block[-len(to_bytes(encoded_boundary))-2:]
|
||||
current += len(block)
|
||||
yield block
|
||||
if self.cb:
|
||||
self.cb(self, current, total)
|
||||
|
||||
def get_size(self, boundary):
|
||||
"""Returns the size in bytes that this param will be when encoded
|
||||
with the given boundary."""
|
||||
if self.filesize is not None:
|
||||
valuesize = self.filesize
|
||||
else:
|
||||
valuesize = len(self.value)
|
||||
|
||||
return len(self.encode_hdr(boundary)) + 2 + valuesize
|
||||
|
||||
def encode_string(boundary, name, value):
|
||||
"""Returns ``name`` and ``value`` encoded as a multipart/form-data
|
||||
variable. ``boundary`` is the boundary string used throughout
|
||||
a single request to separate variables."""
|
||||
|
||||
return MultipartParam(name, value).encode(boundary)
|
||||
|
||||
def encode_file_header(boundary, paramname, filesize, filename=None,
|
||||
filetype=None):
|
||||
"""Returns the leading data for a multipart/form-data field that contains
|
||||
file data.
|
||||
|
||||
``boundary`` is the boundary string used throughout a single request to
|
||||
separate variables.
|
||||
|
||||
``paramname`` is the name of the variable in this request.
|
||||
|
||||
``filesize`` is the size of the file data.
|
||||
|
||||
``filename`` if specified is the filename to give to this field. This
|
||||
field is only useful to the server for determining the original filename.
|
||||
|
||||
``filetype`` if specified is the MIME type of this file.
|
||||
|
||||
The actual file data should be sent after this header has been sent.
|
||||
"""
|
||||
|
||||
return MultipartParam(paramname, filesize=filesize, filename=filename,
|
||||
filetype=filetype).encode_hdr(boundary)
|
||||
|
||||
def get_body_size(params, boundary):
|
||||
"""Returns the number of bytes that the multipart/form-data encoding
|
||||
of ``params`` will be."""
|
||||
size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params))
|
||||
return size + len(boundary) + 6
|
||||
|
||||
def get_headers(params, boundary):
|
||||
"""Returns a dictionary with Content-Type and Content-Length headers
|
||||
for the multipart/form-data encoding of ``params``."""
|
||||
headers = {}
|
||||
boundary = quote_plus(boundary)
|
||||
headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary
|
||||
headers['Content-Length'] = str(get_body_size(params, boundary))
|
||||
return headers
|
||||
|
||||
class multipart_yielder:
|
||||
def __init__(self, params, boundary, cb):
|
||||
self.params = params
|
||||
self.boundary = boundary
|
||||
self.cb = cb
|
||||
|
||||
self.i = 0
|
||||
self.p = None
|
||||
self.param_iter = None
|
||||
self.current = 0
|
||||
self.total = get_body_size(params, boundary)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
return self.next()
|
||||
|
||||
def next(self):
|
||||
"""generator function to yield multipart/form-data representation
|
||||
of parameters"""
|
||||
if self.param_iter is not None:
|
||||
try:
|
||||
block = advance_iterator(self.param_iter)
|
||||
self.current += len(block)
|
||||
if self.cb:
|
||||
self.cb(self.p, self.current, self.total)
|
||||
return block
|
||||
except StopIteration:
|
||||
self.p = None
|
||||
self.param_iter = None
|
||||
|
||||
if self.i is None:
|
||||
raise StopIteration
|
||||
elif self.i >= len(self.params):
|
||||
self.param_iter = None
|
||||
self.p = None
|
||||
self.i = None
|
||||
block = to_bytes("--%s--\r\n" % self.boundary)
|
||||
self.current += len(block)
|
||||
if self.cb:
|
||||
self.cb(self.p, self.current, self.total)
|
||||
return block
|
||||
|
||||
self.p = self.params[self.i]
|
||||
self.param_iter = self.p.iter_encode(self.boundary)
|
||||
self.i += 1
|
||||
return advance_iterator(self)
|
||||
|
||||
def reset(self):
|
||||
self.i = 0
|
||||
self.current = 0
|
||||
for param in self.params:
|
||||
param.reset()
|
||||
|
||||
def multipart_encode(params, boundary=None, cb=None):
|
||||
"""Encode ``params`` as multipart/form-data.
|
||||
|
||||
``params`` should be a sequence of (name, value) pairs or MultipartParam
|
||||
objects, or a mapping of names to values.
|
||||
Values are either strings parameter values, or file-like objects to use as
|
||||
the parameter value. The file-like objects must support .read() and either
|
||||
.fileno() or both .seek() and .tell().
|
||||
|
||||
If ``boundary`` is set, then it as used as the MIME boundary. Otherwise
|
||||
a randomly generated boundary will be used. In either case, if the
|
||||
boundary string appears in the parameter values a ValueError will be
|
||||
raised.
|
||||
|
||||
If ``cb`` is set, it should be a callback which will get called as blocks
|
||||
of data are encoded. It will be called with (param, current, total),
|
||||
indicating the current parameter being encoded, the current amount encoded,
|
||||
and the total amount to encode.
|
||||
|
||||
Returns a tuple of `datagen`, `headers`, where `datagen` is a
|
||||
generator that will yield blocks of data that make up the encoded
|
||||
parameters, and `headers` is a dictionary with the assoicated
|
||||
Content-Type and Content-Length headers.
|
||||
|
||||
Examples:
|
||||
|
||||
>>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" in s and "value1" in s
|
||||
|
||||
>>> p = MultipartParam("key", "value2")
|
||||
>>> datagen, headers = multipart_encode( [("key", "value1"), p] )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" in s and "value1" in s
|
||||
|
||||
>>> datagen, headers = multipart_encode( {"key": "value1"} )
|
||||
>>> s = "".join(datagen)
|
||||
>>> assert "value2" not in s and "value1" in s
|
||||
|
||||
"""
|
||||
if boundary is None:
|
||||
boundary = gen_boundary()
|
||||
else:
|
||||
boundary = quote_plus(boundary)
|
||||
|
||||
headers = get_headers(params, boundary)
|
||||
params = MultipartParam.from_params(params)
|
||||
|
||||
return multipart_yielder(params, boundary, cb), headers
|
201
lib/cloudinary/poster/streaminghttp.py
Normal file
201
lib/cloudinary/poster/streaminghttp.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# MIT licensed code copied from https://bitbucket.org/chrisatlee/poster
|
||||
"""Streaming HTTP uploads module.
|
||||
|
||||
This module extends the standard httplib and urllib2 objects so that
|
||||
iterable objects can be used in the body of HTTP requests.
|
||||
|
||||
In most cases all one should have to do is call :func:`register_openers()`
|
||||
to register the new streaming http handlers which will take priority over
|
||||
the default handlers, and then you can use iterable objects in the body
|
||||
of HTTP requests.
|
||||
|
||||
**N.B.** You must specify a Content-Length header if using an iterable object
|
||||
since there is no way to determine in advance the total size that will be
|
||||
yielded, and there is no way to reset an interator.
|
||||
|
||||
Example usage:
|
||||
|
||||
>>> from StringIO import StringIO
|
||||
>>> import urllib2, poster.streaminghttp
|
||||
|
||||
>>> opener = poster.streaminghttp.register_openers()
|
||||
|
||||
>>> s = "Test file data"
|
||||
>>> f = StringIO(s)
|
||||
|
||||
>>> req = urllib2.Request("http://localhost:5000", f,
|
||||
... {'Content-Length': str(len(s))})
|
||||
"""
|
||||
|
||||
import sys, socket
|
||||
from cloudinary.compat import httplib, urllib2, NotConnected
|
||||
|
||||
__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
|
||||
'StreamingHTTPHandler', 'register_openers']
|
||||
|
||||
if hasattr(httplib, 'HTTPS'):
|
||||
__all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
|
||||
|
||||
class _StreamingHTTPMixin:
|
||||
"""Mixin class for HTTP and HTTPS connections that implements a streaming
|
||||
send method."""
|
||||
def send(self, value):
|
||||
"""Send ``value`` to the server.
|
||||
|
||||
``value`` can be a string object, a file-like object that supports
|
||||
a .read() method, or an iterable object that supports a .next()
|
||||
method.
|
||||
"""
|
||||
# Based on python 2.6's httplib.HTTPConnection.send()
|
||||
if self.sock is None:
|
||||
if self.auto_open:
|
||||
self.connect()
|
||||
else:
|
||||
raise NotConnected()
|
||||
|
||||
# send the data to the server. if we get a broken pipe, then close
|
||||
# the socket. we want to reconnect when somebody tries to send again.
|
||||
#
|
||||
# NOTE: we DO propagate the error, though, because we cannot simply
|
||||
# ignore the error... the caller will know if they can retry.
|
||||
if self.debuglevel > 0:
|
||||
print("send:", repr(value))
|
||||
try:
|
||||
blocksize = 8192
|
||||
if hasattr(value, 'read') :
|
||||
if hasattr(value, 'seek'):
|
||||
value.seek(0)
|
||||
if self.debuglevel > 0:
|
||||
print("sendIng a read()able")
|
||||
data = value.read(blocksize)
|
||||
while data:
|
||||
self.sock.sendall(data)
|
||||
data = value.read(blocksize)
|
||||
elif hasattr(value, 'next'):
|
||||
if hasattr(value, 'reset'):
|
||||
value.reset()
|
||||
if self.debuglevel > 0:
|
||||
print("sendIng an iterable")
|
||||
for data in value:
|
||||
self.sock.sendall(data)
|
||||
else:
|
||||
self.sock.sendall(value)
|
||||
except socket.error:
|
||||
e = sys.exc_info()[1]
|
||||
if e[0] == 32: # Broken pipe
|
||||
self.close()
|
||||
raise
|
||||
|
||||
class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
|
||||
"""Subclass of `httplib.HTTPConnection` that overrides the `send()` method
|
||||
to support iterable body objects"""
|
||||
|
||||
class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
|
||||
"""Subclass of `urllib2.HTTPRedirectHandler` that overrides the
|
||||
`redirect_request` method to properly handle redirected POST requests
|
||||
|
||||
This class is required because python 2.5's HTTPRedirectHandler does
|
||||
not remove the Content-Type or Content-Length headers when requesting
|
||||
the new resource, but the body of the original request is not preserved.
|
||||
"""
|
||||
|
||||
handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
|
||||
|
||||
# From python2.6 urllib2's HTTPRedirectHandler
|
||||
def redirect_request(self, req, fp, code, msg, headers, newurl):
|
||||
"""Return a Request or None in response to a redirect.
|
||||
|
||||
This is called by the http_error_30x methods when a
|
||||
redirection response is received. If a redirection should
|
||||
take place, return a new Request to allow http_error_30x to
|
||||
perform the redirect. Otherwise, raise HTTPError if no-one
|
||||
else should try to handle this url. Return None if you can't
|
||||
but another Handler might.
|
||||
"""
|
||||
m = req.get_method()
|
||||
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
|
||||
or code in (301, 302, 303) and m == "POST"):
|
||||
# Strictly (according to RFC 2616), 301 or 302 in response
|
||||
# to a POST MUST NOT cause a redirection without confirmation
|
||||
# from the user (of urllib2, in this case). In practice,
|
||||
# essentially all clients do redirect in this case, so we
|
||||
# do the same.
|
||||
# be conciliant with URIs containing a space
|
||||
newurl = newurl.replace(' ', '%20')
|
||||
newheaders = dict((k, v) for k, v in req.headers.items()
|
||||
if k.lower() not in (
|
||||
"content-length", "content-type")
|
||||
)
|
||||
return urllib2.Request(newurl,
|
||||
headers=newheaders,
|
||||
origin_req_host=req.get_origin_req_host(),
|
||||
unverifiable=True)
|
||||
else:
|
||||
raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
|
||||
|
||||
class StreamingHTTPHandler(urllib2.HTTPHandler):
|
||||
"""Subclass of `urllib2.HTTPHandler` that uses
|
||||
StreamingHTTPConnection as its http connection class."""
|
||||
|
||||
handler_order = urllib2.HTTPHandler.handler_order - 1
|
||||
|
||||
def http_open(self, req):
|
||||
"""Open a StreamingHTTPConnection for the given request"""
|
||||
return self.do_open(StreamingHTTPConnection, req)
|
||||
|
||||
def http_request(self, req):
|
||||
"""Handle a HTTP request. Make sure that Content-Length is specified
|
||||
if we're using an interable value"""
|
||||
# Make sure that if we're using an iterable object as the request
|
||||
# body, that we've also specified Content-Length
|
||||
if req.has_data():
|
||||
data = req.get_data()
|
||||
if hasattr(data, 'read') or hasattr(data, 'next'):
|
||||
if not req.has_header('Content-length'):
|
||||
raise ValueError(
|
||||
"No Content-Length specified for iterable body")
|
||||
return urllib2.HTTPHandler.do_request_(self, req)
|
||||
|
||||
if hasattr(httplib, 'HTTPS'):
|
||||
class StreamingHTTPSConnection(_StreamingHTTPMixin,
|
||||
httplib.HTTPSConnection):
|
||||
"""Subclass of `httplib.HTTSConnection` that overrides the `send()`
|
||||
method to support iterable body objects"""
|
||||
|
||||
class StreamingHTTPSHandler(urllib2.HTTPSHandler):
|
||||
"""Subclass of `urllib2.HTTPSHandler` that uses
|
||||
StreamingHTTPSConnection as its http connection class."""
|
||||
|
||||
handler_order = urllib2.HTTPSHandler.handler_order - 1
|
||||
|
||||
def https_open(self, req):
|
||||
return self.do_open(StreamingHTTPSConnection, req)
|
||||
|
||||
def https_request(self, req):
|
||||
# Make sure that if we're using an iterable object as the request
|
||||
# body, that we've also specified Content-Length
|
||||
if req.has_data():
|
||||
data = req.get_data()
|
||||
if hasattr(data, 'read') or hasattr(data, 'next'):
|
||||
if not req.has_header('Content-length'):
|
||||
raise ValueError(
|
||||
"No Content-Length specified for iterable body")
|
||||
return urllib2.HTTPSHandler.do_request_(self, req)
|
||||
|
||||
|
||||
def get_handlers():
|
||||
handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
|
||||
if hasattr(httplib, "HTTPS"):
|
||||
handlers.append(StreamingHTTPSHandler)
|
||||
return handlers
|
||||
|
||||
def register_openers():
|
||||
"""Register the streaming http handlers in the global urllib2 default
|
||||
opener object.
|
||||
|
||||
Returns the created OpenerDirector object."""
|
||||
opener = urllib2.build_opener(*get_handlers())
|
||||
|
||||
urllib2.install_opener(opener)
|
||||
|
||||
return opener
|
59
lib/cloudinary/search.py
Normal file
59
lib/cloudinary/search.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import json
|
||||
from copy import deepcopy
|
||||
from . import api
|
||||
|
||||
|
||||
class Search:
|
||||
"""Build and execute a search query."""
|
||||
def __init__(self):
|
||||
self.query = {}
|
||||
|
||||
def expression(self, value):
|
||||
"""Specify the search query expression."""
|
||||
self.query["expression"] = value
|
||||
return self
|
||||
|
||||
def max_results(self, value):
|
||||
"""Set the max results to return"""
|
||||
self.query["max_results"] = value
|
||||
return self
|
||||
|
||||
def next_cursor(self, value):
|
||||
"""Get next page in the query using the ``next_cursor`` value from a previous invocation."""
|
||||
self.query["next_cursor"] = value
|
||||
return self
|
||||
|
||||
def sort_by(self, field_name, direction=None):
|
||||
"""Add a field to sort results by. If not provided, direction is ``desc``."""
|
||||
if direction is None:
|
||||
direction = 'desc'
|
||||
self._add("sort_by", {field_name: direction})
|
||||
return self
|
||||
|
||||
def aggregate(self, value):
|
||||
"""Aggregate field."""
|
||||
self._add("aggregate", value)
|
||||
return self
|
||||
|
||||
def with_field(self, value):
|
||||
"""Request an additional field in the result set."""
|
||||
self._add("with_field", value)
|
||||
return self
|
||||
|
||||
def to_json(self):
|
||||
return json.dumps(self.query)
|
||||
|
||||
def execute(self, **options):
|
||||
"""Execute the search and return results."""
|
||||
options["content_type"] = 'application/json'
|
||||
uri = ['resources','search']
|
||||
return api.call_json_api('post', uri, self.as_dict(), **options)
|
||||
|
||||
def _add(self, name, value):
|
||||
if name not in self.query:
|
||||
self.query[name] = []
|
||||
self.query[name].append(value)
|
||||
return self
|
||||
|
||||
def as_dict(self):
|
||||
return deepcopy(self.query)
|
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
43
lib/cloudinary/static/html/cloudinary_cors.html
Normal file
@@ -0,0 +1,43 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
</head>
|
||||
<body>
|
||||
<script>
|
||||
/*
|
||||
json2.js
|
||||
2011-10-19
|
||||
|
||||
Public Domain.
|
||||
|
||||
NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
|
||||
|
||||
See http://www.JSON.org/js.html
|
||||
|
||||
This code should be minified before deployment.
|
||||
See http://javascript.crockford.com/jsmin.html
|
||||
|
||||
USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
|
||||
NOT CONTROL.
|
||||
|
||||
*/
|
||||
var JSON;if(!JSON){JSON={}}(function(){function str(a,b){var c,d,e,f,g=gap,h,i=b[a];if(i&&typeof i==="object"&&typeof i.toJSON==="function"){i=i.toJSON(a)}if(typeof rep==="function"){i=rep.call(b,a,i)}switch(typeof i){case"string":return quote(i);case"number":return isFinite(i)?String(i):"null";case"boolean":case"null":return String(i);case"object":if(!i){return"null"}gap+=indent;h=[];if(Object.prototype.toString.apply(i)==="[object Array]"){f=i.length;for(c=0;c<f;c+=1){h[c]=str(c,i)||"null"}e=h.length===0?"[]":gap?"[\n"+gap+h.join(",\n"+gap)+"\n"+g+"]":"["+h.join(",")+"]";gap=g;return e}if(rep&&typeof rep==="object"){f=rep.length;for(c=0;c<f;c+=1){if(typeof rep[c]==="string"){d=rep[c];e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}else{for(d in i){if(Object.prototype.hasOwnProperty.call(i,d)){e=str(d,i);if(e){h.push(quote(d)+(gap?": ":":")+e)}}}}e=h.length===0?"{}":gap?"{\n"+gap+h.join(",\n"+gap)+"\n"+g+"}":"{"+h.join(",")+"}";gap=g;return e}}function quote(a){escapable.lastIndex=0;return escapable.test(a)?'"'+a.replace(escapable,function(a){var b=meta[a];return typeof b==="string"?b:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+a+'"'}function f(a){return a<10?"0"+a:a}"use strict";if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(a){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(a){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;if(typeof 
JSON.stringify!=="function"){JSON.stringify=function(a,b,c){var d;gap="";indent="";if(typeof c==="number"){for(d=0;d<c;d+=1){indent+=" "}}else if(typeof c==="string"){indent=c}rep=b;if(b&&typeof b!=="function"&&(typeof b!=="object"||typeof b.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":a})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){function walk(a,b){var c,d,e=a[b];if(e&&typeof e==="object"){for(c in e){if(Object.prototype.hasOwnProperty.call(e,c)){d=walk(e,c);if(d!==undefined){e[c]=d}else{delete e[c]}}}}return reviver.call(a,b,e)}var j;text=String(text);cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}})()
|
||||
/* end of json2.js */
|
||||
;
|
||||
function parse(query) {
|
||||
var result = {};
|
||||
var params = query.split("&");
|
||||
for (var i = 0; i < params.length; i++) {
|
||||
var param = params[i].split("=");
|
||||
result[param[0]] = decodeURIComponent(param[1]);
|
||||
}
|
||||
return JSON.stringify(result);
|
||||
}
|
||||
|
||||
document.body.textContent = document.body.innerText = parse(window.location.search.slice(1));
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
2
lib/cloudinary/static/js/canvas-to-blob.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
!function(t){"use strict";var e=t.HTMLCanvasElement&&t.HTMLCanvasElement.prototype,o=t.Blob&&function(){try{return Boolean(new Blob)}catch(t){return!1}}(),n=o&&t.Uint8Array&&function(){try{return 100===new Blob([new Uint8Array(100)]).size}catch(t){return!1}}(),r=t.BlobBuilder||t.WebKitBlobBuilder||t.MozBlobBuilder||t.MSBlobBuilder,a=/^data:((.*?)(;charset=.*?)?)(;base64)?,/,i=(o||r)&&t.atob&&t.ArrayBuffer&&t.Uint8Array&&function(t){var e,i,l,u,c,f,b,d,B;if(!(e=t.match(a)))throw new Error("invalid data URI");for(i=e[2]?e[1]:"text/plain"+(e[3]||";charset=US-ASCII"),l=!!e[4],u=t.slice(e[0].length),c=l?atob(u):decodeURIComponent(u),f=new ArrayBuffer(c.length),b=new Uint8Array(f),d=0;d<c.length;d+=1)b[d]=c.charCodeAt(d);return o?new Blob([n?b:f],{type:i}):((B=new r).append(f),B.getBlob(i))};t.HTMLCanvasElement&&!e.toBlob&&(e.mozGetAsFile?e.toBlob=function(t,o,n){var r=this;setTimeout(function(){t(n&&e.toDataURL&&i?i(r.toDataURL(o,n)):r.mozGetAsFile("blob",o))})}:e.toDataURL&&i&&(e.toBlob=function(t,e,o){var n=this;setTimeout(function(){t(i(n.toDataURL(e,o)))})})),"function"==typeof define&&define.amd?define(function(){return i}):"object"==typeof module&&module.exports?module.exports=i:t.dataURLtoBlob=i}(window);
|
||||
//# sourceMappingURL=canvas-to-blob.min.js.map
|
4722
lib/cloudinary/static/js/jquery.cloudinary.js
Normal file
4722
lib/cloudinary/static/js/jquery.cloudinary.js
Normal file
File diff suppressed because it is too large
Load Diff
326
lib/cloudinary/static/js/jquery.fileupload-image.js
vendored
Normal file
326
lib/cloudinary/static/js/jquery.fileupload-image.js
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
/*
|
||||
* jQuery File Upload Image Preview & Resize Plugin
|
||||
* https://github.com/blueimp/jQuery-File-Upload
|
||||
*
|
||||
* Copyright 2013, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
/* jshint nomen:false */
|
||||
/* global define, require, window, Blob */
|
||||
|
||||
;(function (factory) {
|
||||
'use strict';
|
||||
if (typeof define === 'function' && define.amd) {
|
||||
// Register as an anonymous AMD module:
|
||||
define([
|
||||
'jquery',
|
||||
'load-image',
|
||||
'load-image-meta',
|
||||
'load-image-scale',
|
||||
'load-image-exif',
|
||||
'canvas-to-blob',
|
||||
'./jquery.fileupload-process'
|
||||
], factory);
|
||||
} else if (typeof exports === 'object') {
|
||||
// Node/CommonJS:
|
||||
factory(
|
||||
require('jquery'),
|
||||
require('blueimp-load-image/js/load-image'),
|
||||
require('blueimp-load-image/js/load-image-meta'),
|
||||
require('blueimp-load-image/js/load-image-scale'),
|
||||
require('blueimp-load-image/js/load-image-exif'),
|
||||
require('blueimp-canvas-to-blob'),
|
||||
require('./jquery.fileupload-process')
|
||||
);
|
||||
} else {
|
||||
// Browser globals:
|
||||
factory(
|
||||
window.jQuery,
|
||||
window.loadImage
|
||||
);
|
||||
}
|
||||
}(function ($, loadImage) {
|
||||
'use strict';
|
||||
|
||||
// Prepend to the default processQueue:
|
||||
$.blueimp.fileupload.prototype.options.processQueue.unshift(
|
||||
{
|
||||
action: 'loadImageMetaData',
|
||||
disableImageHead: '@',
|
||||
disableExif: '@',
|
||||
disableExifThumbnail: '@',
|
||||
disableExifSub: '@',
|
||||
disableExifGps: '@',
|
||||
disabled: '@disableImageMetaDataLoad'
|
||||
},
|
||||
{
|
||||
action: 'loadImage',
|
||||
// Use the action as prefix for the "@" options:
|
||||
prefix: true,
|
||||
fileTypes: '@',
|
||||
maxFileSize: '@',
|
||||
noRevoke: '@',
|
||||
disabled: '@disableImageLoad'
|
||||
},
|
||||
{
|
||||
action: 'resizeImage',
|
||||
// Use "image" as prefix for the "@" options:
|
||||
prefix: 'image',
|
||||
maxWidth: '@',
|
||||
maxHeight: '@',
|
||||
minWidth: '@',
|
||||
minHeight: '@',
|
||||
crop: '@',
|
||||
orientation: '@',
|
||||
forceResize: '@',
|
||||
disabled: '@disableImageResize'
|
||||
},
|
||||
{
|
||||
action: 'saveImage',
|
||||
quality: '@imageQuality',
|
||||
type: '@imageType',
|
||||
disabled: '@disableImageResize'
|
||||
},
|
||||
{
|
||||
action: 'saveImageMetaData',
|
||||
disabled: '@disableImageMetaDataSave'
|
||||
},
|
||||
{
|
||||
action: 'resizeImage',
|
||||
// Use "preview" as prefix for the "@" options:
|
||||
prefix: 'preview',
|
||||
maxWidth: '@',
|
||||
maxHeight: '@',
|
||||
minWidth: '@',
|
||||
minHeight: '@',
|
||||
crop: '@',
|
||||
orientation: '@',
|
||||
thumbnail: '@',
|
||||
canvas: '@',
|
||||
disabled: '@disableImagePreview'
|
||||
},
|
||||
{
|
||||
action: 'setImage',
|
||||
name: '@imagePreviewName',
|
||||
disabled: '@disableImagePreview'
|
||||
},
|
||||
{
|
||||
action: 'deleteImageReferences',
|
||||
disabled: '@disableImageReferencesDeletion'
|
||||
}
|
||||
);
|
||||
|
||||
// The File Upload Resize plugin extends the fileupload widget
|
||||
// with image resize functionality:
|
||||
$.widget('blueimp.fileupload', $.blueimp.fileupload, {
|
||||
|
||||
options: {
|
||||
// The regular expression for the types of images to load:
|
||||
// matched against the file type:
|
||||
loadImageFileTypes: /^image\/(gif|jpeg|png|svg\+xml)$/,
|
||||
// The maximum file size of images to load:
|
||||
loadImageMaxFileSize: 10000000, // 10MB
|
||||
// The maximum width of resized images:
|
||||
imageMaxWidth: 1920,
|
||||
// The maximum height of resized images:
|
||||
imageMaxHeight: 1080,
|
||||
// Defines the image orientation (1-8) or takes the orientation
|
||||
// value from Exif data if set to true:
|
||||
imageOrientation: false,
|
||||
// Define if resized images should be cropped or only scaled:
|
||||
imageCrop: false,
|
||||
// Disable the resize image functionality by default:
|
||||
disableImageResize: true,
|
||||
// The maximum width of the preview images:
|
||||
previewMaxWidth: 80,
|
||||
// The maximum height of the preview images:
|
||||
previewMaxHeight: 80,
|
||||
// Defines the preview orientation (1-8) or takes the orientation
|
||||
// value from Exif data if set to true:
|
||||
previewOrientation: true,
|
||||
// Create the preview using the Exif data thumbnail:
|
||||
previewThumbnail: true,
|
||||
// Define if preview images should be cropped or only scaled:
|
||||
previewCrop: false,
|
||||
// Define if preview images should be resized as canvas elements:
|
||||
previewCanvas: true
|
||||
},
|
||||
|
||||
processActions: {
|
||||
|
||||
// Loads the image given via data.files and data.index
|
||||
// as img element, if the browser supports the File API.
|
||||
// Accepts the options fileTypes (regular expression)
|
||||
// and maxFileSize (integer) to limit the files to load:
|
||||
loadImage: function (data, options) {
|
||||
if (options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var that = this,
|
||||
file = data.files[data.index],
|
||||
dfd = $.Deferred();
|
||||
if (($.type(options.maxFileSize) === 'number' &&
|
||||
file.size > options.maxFileSize) ||
|
||||
(options.fileTypes &&
|
||||
!options.fileTypes.test(file.type)) ||
|
||||
!loadImage(
|
||||
file,
|
||||
function (img) {
|
||||
if (img.src) {
|
||||
data.img = img;
|
||||
}
|
||||
dfd.resolveWith(that, [data]);
|
||||
},
|
||||
options
|
||||
)) {
|
||||
return data;
|
||||
}
|
||||
return dfd.promise();
|
||||
},
|
||||
|
||||
// Resizes the image given as data.canvas or data.img
// and updates data.canvas or data.img with the resized image.
// Also stores the resized image as preview property.
// Accepts the options maxWidth, maxHeight, minWidth,
// minHeight, canvas and crop:
resizeImage: function (data, options) {
    if (options.disabled || !(data.canvas || data.img)) {
        return data;
    }
    // Default to canvas-based resizing unless explicitly disabled:
    options = $.extend({canvas: true}, options);
    var that = this,
        dfd = $.Deferred(),
        // Prefer the canvas over the img element as resize source:
        img = (options.canvas && data.canvas) || data.img,
        resolve = function (newImg) {
            // Only replace the stored image when the dimensions
            // actually changed (or a resize is forced), to avoid
            // needless canvas/img swaps:
            if (newImg && (newImg.width !== img.width ||
                    newImg.height !== img.height ||
                    options.forceResize)) {
                data[newImg.getContext ? 'canvas' : 'img'] = newImg;
            }
            data.preview = newImg;
            dfd.resolveWith(that, [data]);
        },
        thumbnail;
    if (data.exif) {
        if (options.orientation === true) {
            // Take the orientation value from the Exif data:
            options.orientation = data.exif.get('Orientation');
        }
        if (options.thumbnail) {
            thumbnail = data.exif.get('Thumbnail');
            if (thumbnail) {
                // Use the embedded Exif thumbnail as preview source:
                loadImage(thumbnail, resolve, options);
                return dfd.promise();
            }
        }
        // Prevent orienting the same image twice:
        if (data.orientation) {
            delete options.orientation;
        } else {
            data.orientation = options.orientation;
        }
    }
    if (img) {
        resolve(loadImage.scale(img, options));
        return dfd.promise();
    }
    return data;
},
|
||||
|
||||
// Saves the processed image given as data.canvas
// inplace at data.index of data.files:
saveImage: function (data, options) {
    if (!data.canvas || options.disabled) {
        return data;
    }
    var that = this,
        file = data.files[data.index],
        dfd = $.Deferred();
    if (data.canvas.toBlob) {
        data.canvas.toBlob(
            function (blob) {
                if (!blob.name) {
                    if (file.type === blob.type) {
                        blob.name = file.name;
                    } else if (file.name) {
                        // Adjust the file extension to the new blob
                        // type, e.g. "image/jpeg" -> ".jpeg":
                        blob.name = file.name.replace(
                            /\.\w+$/,
                            '.' + blob.type.substr(6)
                        );
                    }
                }
                // Don't restore invalid meta data:
                if (file.type !== blob.type) {
                    delete data.imageHead;
                }
                // Store the created blob at the position
                // of the original file in the files list:
                data.files[data.index] = blob;
                dfd.resolveWith(that, [data]);
            },
            options.type || file.type,
            options.quality
        );
    } else {
        // canvas.toBlob is unsupported; keep the original file:
        return data;
    }
    return dfd.promise();
},
|
||||
|
||||
loadImageMetaData: function (data, options) {
|
||||
if (options.disabled) {
|
||||
return data;
|
||||
}
|
||||
var that = this,
|
||||
dfd = $.Deferred();
|
||||
loadImage.parseMetaData(data.files[data.index], function (result) {
|
||||
$.extend(data, result);
|
||||
dfd.resolveWith(that, [data]);
|
||||
}, options);
|
||||
return dfd.promise();
|
||||
},
|
||||
|
||||
saveImageMetaData: function (data, options) {
|
||||
if (!(data.imageHead && data.canvas &&
|
||||
data.canvas.toBlob && !options.disabled)) {
|
||||
return data;
|
||||
}
|
||||
var file = data.files[data.index],
|
||||
blob = new Blob([
|
||||
data.imageHead,
|
||||
// Resized images always have a head size of 20 bytes,
|
||||
// including the JPEG marker and a minimal JFIF header:
|
||||
this._blobSlice.call(file, 20)
|
||||
], {type: file.type});
|
||||
blob.name = file.name;
|
||||
data.files[data.index] = blob;
|
||||
return data;
|
||||
},
|
||||
|
||||
// Sets the resized version of the image as a property of the
|
||||
// file object, must be called after "saveImage":
|
||||
setImage: function (data, options) {
|
||||
if (data.preview && !options.disabled) {
|
||||
data.files[data.index][options.name || 'preview'] = data.preview;
|
||||
}
|
||||
return data;
|
||||
},
|
||||
|
||||
deleteImageReferences: function (data, options) {
|
||||
if (!options.disabled) {
|
||||
delete data.img;
|
||||
delete data.canvas;
|
||||
delete data.preview;
|
||||
delete data.imageHead;
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
}));
|
178
lib/cloudinary/static/js/jquery.fileupload-process.js
vendored
Normal file
178
lib/cloudinary/static/js/jquery.fileupload-process.js
vendored
Normal file
@@ -0,0 +1,178 @@
|
||||
/*
 * jQuery File Upload Processing Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2012, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* jshint nomen:false */
/* global define, require, window */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define([
            'jquery',
            './jquery.fileupload'
        ], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(
            require('jquery'),
            require('./jquery.fileupload')
        );
    } else {
        // Browser globals:
        factory(
            window.jQuery
        );
    }
}(function ($) {
    'use strict';

    // Keep a reference to the base widget's add handler so the
    // overriding handler below can delegate to it:
    var originalAdd = $.blueimp.fileupload.prototype.options.add;

    // The File Upload Processing plugin extends the fileupload widget
    // with file processing functionality:
    $.widget('blueimp.fileupload', $.blueimp.fileupload, {

        options: {
            // The list of processing actions:
            processQueue: [
                /*
                {
                    action: 'log',
                    type: 'debug'
                }
                */
            ],
            // Registers the process callback on the data object before
            // delegating to the original add handler:
            add: function (e, data) {
                var $this = $(this);
                data.process(function () {
                    return $this.fileupload('process', data);
                });
                originalAdd.call(this, e, data);
            }
        },

        // Map of named processing actions; plugins (e.g. the image and
        // validate plugins) add their actions here:
        processActions: {
            /*
            log: function (data, options) {
                console[options.type](
                    'Processing "' + data.files[data.index].name + '"'
                );
            }
            */
        },

        // Runs every processQueue action for a single file, chaining
        // them as promises, and triggers the process* events:
        _processFile: function (data, originalData) {
            var that = this,
                dfd = $.Deferred().resolveWith(that, [data]),
                chain = dfd.promise();
            this._trigger('process', null, data);
            $.each(data.processQueue, function (i, settings) {
                var func = function (data) {
                    // Abort the chain once an upload error occurred:
                    if (originalData.errorThrown) {
                        return $.Deferred()
                            .rejectWith(that, [originalData]).promise();
                    }
                    return that.processActions[settings.action].call(
                        that,
                        data,
                        settings
                    );
                };
                // Actions with "always: true" also run after rejections:
                chain = chain.then(func, settings.always && func);
            });
            chain
                .done(function () {
                    that._trigger('processdone', null, data);
                    that._trigger('processalways', null, data);
                })
                .fail(function () {
                    that._trigger('processfail', null, data);
                    that._trigger('processalways', null, data);
                });
            return chain;
        },

        // Replaces the settings of each processQueue item that
        // are strings starting with an "@", using the remaining
        // substring as key for the option map,
        // e.g. "@autoUpload" is replaced with options.autoUpload:
        _transformProcessQueue: function (options) {
            var processQueue = [];
            $.each(options.processQueue, function () {
                var settings = {},
                    action = this.action,
                    prefix = this.prefix === true ? action : this.prefix;
                $.each(this, function (key, value) {
                    if ($.type(value) === 'string' &&
                            value.charAt(0) === '@') {
                        settings[key] = options[
                            value.slice(1) || (prefix ? prefix +
                                key.charAt(0).toUpperCase() + key.slice(1) : key)
                        ];
                    } else {
                        settings[key] = value;
                    }

                });
                processQueue.push(settings);
            });
            options.processQueue = processQueue;
        },

        // Returns the number of files currently in the processing queue:
        processing: function () {
            return this._processing;
        },

        // Processes the files given as files property of the data parameter,
        // returns a Promise object that allows to bind callbacks:
        process: function (data) {
            var that = this,
                options = $.extend({}, this.options, data);
            if (options.processQueue && options.processQueue.length) {
                this._transformProcessQueue(options);
                if (this._processing === 0) {
                    this._trigger('processstart');
                }
                $.each(data.files, function (index) {
                    // Each file after the first gets its own copy of
                    // the options (opts.index differs per file):
                    var opts = index ? $.extend({}, options) : options,
                        func = function () {
                            if (data.errorThrown) {
                                return $.Deferred()
                                    .rejectWith(that, [data]).promise();
                            }
                            return that._processFile(opts, data);
                        };
                    opts.index = index;
                    that._processing += 1;
                    that._processingQueue = that._processingQueue.then(func, func)
                        .always(function () {
                            that._processing -= 1;
                            if (that._processing === 0) {
                                that._trigger('processstop');
                            }
                        });
                });
            }
            return this._processingQueue;
        },

        // Initializes the processing counter and an already-resolved
        // promise as the head of the processing queue:
        _create: function () {
            this._super();
            this._processing = 0;
            this._processingQueue = $.Deferred().resolveWith(this)
                .promise();
        }

    });

}));
|
125
lib/cloudinary/static/js/jquery.fileupload-validate.js
vendored
Normal file
125
lib/cloudinary/static/js/jquery.fileupload-validate.js
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
/*
 * jQuery File Upload Validation Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2013, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* global define, require, window */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define([
            'jquery',
            './jquery.fileupload-process'
        ], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(
            require('jquery'),
            require('./jquery.fileupload-process')
        );
    } else {
        // Browser globals:
        factory(
            window.jQuery
        );
    }
}(function ($) {
    'use strict';

    // Append to the default processQueue:
    $.blueimp.fileupload.prototype.options.processQueue.push(
        {
            action: 'validate',
            // Always trigger this action,
            // even if the previous action was rejected:
            always: true,
            // Options taken from the global options map
            // ("@" values are resolved by _transformProcessQueue):
            acceptFileTypes: '@',
            maxFileSize: '@',
            minFileSize: '@',
            maxNumberOfFiles: '@',
            disabled: '@disableValidation'
        }
    );

    // The File Upload Validation plugin extends the fileupload widget
    // with file validation functionality:
    $.widget('blueimp.fileupload', $.blueimp.fileupload, {

        options: {
            /*
            // The regular expression for allowed file types, matches
            // against either file type or file name:
            acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
            // The maximum allowed file size in bytes:
            maxFileSize: 10000000, // 10 MB
            // The minimum allowed file size in bytes:
            minFileSize: undefined, // No minimal file size
            // The limit of files to be uploaded:
            maxNumberOfFiles: 10,
            */

            // Function returning the current number of files,
            // has to be overridden for maxNumberOfFiles validation:
            getNumberOfFiles: $.noop,

            // Error and info messages:
            messages: {
                maxNumberOfFiles: 'Maximum number of files exceeded',
                acceptFileTypes: 'File type not allowed',
                maxFileSize: 'File is too large',
                minFileSize: 'File is too small'
            }
        },

        processActions: {

            // Validates the file at data.index against the given
            // options; on failure sets file.error to a localized
            // message and rejects the returned promise, on success
            // clears file.error and resolves it:
            validate: function (data, options) {
                if (options.disabled) {
                    return data;
                }
                var dfd = $.Deferred(),
                    settings = this.options,
                    file = data.files[data.index],
                    fileSize;
                // Only read the size when a size limit is configured:
                if (options.minFileSize || options.maxFileSize) {
                    fileSize = file.size;
                }
                if ($.type(options.maxNumberOfFiles) === 'number' &&
                        (settings.getNumberOfFiles() || 0) + data.files.length >
                            options.maxNumberOfFiles) {
                    file.error = settings.i18n('maxNumberOfFiles');
                } else if (options.acceptFileTypes &&
                        !(options.acceptFileTypes.test(file.type) ||
                        options.acceptFileTypes.test(file.name))) {
                    file.error = settings.i18n('acceptFileTypes');
                } else if (fileSize > options.maxFileSize) {
                    file.error = settings.i18n('maxFileSize');
                } else if ($.type(fileSize) === 'number' &&
                        fileSize < options.minFileSize) {
                    file.error = settings.i18n('minFileSize');
                } else {
                    delete file.error;
                }
                // Reject when this file failed or an earlier file of
                // the same selection already failed:
                if (file.error || data.files.error) {
                    data.files.error = true;
                    dfd.rejectWith(this, [data]);
                } else {
                    dfd.resolveWith(this, [data]);
                }
                return dfd.promise();
            }

        }

    });

}));
|
1482
lib/cloudinary/static/js/jquery.fileupload.js
vendored
Normal file
1482
lib/cloudinary/static/js/jquery.fileupload.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
224
lib/cloudinary/static/js/jquery.iframe-transport.js
Normal file
224
lib/cloudinary/static/js/jquery.iframe-transport.js
Normal file
@@ -0,0 +1,224 @@
|
||||
/*
 * jQuery Iframe Transport Plugin
 * https://github.com/blueimp/jQuery-File-Upload
 *
 * Copyright 2011, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 */

/* global define, require, window, document, JSON */

;(function (factory) {
    'use strict';
    if (typeof define === 'function' && define.amd) {
        // Register as an anonymous AMD module:
        define(['jquery'], factory);
    } else if (typeof exports === 'object') {
        // Node/CommonJS:
        factory(require('jquery'));
    } else {
        // Browser globals:
        factory(window.jQuery);
    }
}(function ($) {
    'use strict';

    // Helper variable to create unique names for the transport iframes:
    var counter = 0,
        jsonAPI = $,
        jsonParse = 'parseJSON';

    // Prefer the native JSON parser over $.parseJSON when available:
    if ('JSON' in window && 'parse' in JSON) {
        jsonAPI = JSON;
        jsonParse = 'parse';
    }

    // The iframe transport accepts four additional options:
    // options.fileInput: a jQuery collection of file input fields
    // options.paramName: the parameter name for the file form data,
    //  overrides the name property of the file input field(s),
    //  can be a string or an array of strings.
    // options.formData: an array of objects with name and value properties,
    //  equivalent to the return data of .serializeArray(), e.g.:
    //  [{name: 'a', value: 1}, {name: 'b', value: 2}]
    // options.initialIframeSrc: the URL of the initial iframe src,
    //  by default set to "javascript:false;"
    $.ajaxTransport('iframe', function (options) {
        if (options.async) {
            // javascript:false as initial iframe src
            // prevents warning popups on HTTPS in IE6:
            /*jshint scripturl: true */
            var initialIframeSrc = options.initialIframeSrc || 'javascript:false;',
            /*jshint scripturl: false */
                form,
                iframe,
                addParamChar;
            return {
                // Submits a hidden form targeting a freshly-created
                // iframe and reports the iframe document as response:
                send: function (_, completeCallback) {
                    form = $('<form style="display:none;"></form>');
                    form.attr('accept-charset', options.formAcceptCharset);
                    addParamChar = /\?/.test(options.url) ? '&' : '?';
                    // XDomainRequest only supports GET and POST:
                    if (options.type === 'DELETE') {
                        options.url = options.url + addParamChar + '_method=DELETE';
                        options.type = 'POST';
                    } else if (options.type === 'PUT') {
                        options.url = options.url + addParamChar + '_method=PUT';
                        options.type = 'POST';
                    } else if (options.type === 'PATCH') {
                        options.url = options.url + addParamChar + '_method=PATCH';
                        options.type = 'POST';
                    }
                    // IE versions below IE8 cannot set the name property of
                    // elements that have already been added to the DOM,
                    // so we set the name along with the iframe HTML markup:
                    counter += 1;
                    iframe = $(
                        '<iframe src="' + initialIframeSrc +
                            '" name="iframe-transport-' + counter + '"></iframe>'
                    ).bind('load', function () {
                        var fileInputClones,
                            paramNames = $.isArray(options.paramName) ?
                                    options.paramName : [options.paramName];
                        // The first load fires for the initial src; the
                        // second load handler below receives the actual
                        // server response after the form submit:
                        iframe
                            .unbind('load')
                            .bind('load', function () {
                                var response;
                                // Wrap in a try/catch block to catch exceptions thrown
                                // when trying to access cross-domain iframe contents:
                                try {
                                    response = iframe.contents();
                                    // Google Chrome and Firefox do not throw an
                                    // exception when calling iframe.contents() on
                                    // cross-domain requests, so we unify the response:
                                    if (!response.length || !response[0].firstChild) {
                                        throw new Error();
                                    }
                                } catch (e) {
                                    response = undefined;
                                }
                                // The complete callback returns the
                                // iframe content document as response object:
                                completeCallback(
                                    200,
                                    'success',
                                    {'iframe': response}
                                );
                                // Fix for IE endless progress bar activity bug
                                // (happens on form submits to iframe targets):
                                $('<iframe src="' + initialIframeSrc + '"></iframe>')
                                    .appendTo(form);
                                window.setTimeout(function () {
                                    // Removing the form in a setTimeout call
                                    // allows Chrome's developer tools to display
                                    // the response result
                                    form.remove();
                                }, 0);
                            });
                        form
                            .prop('target', iframe.prop('name'))
                            .prop('action', options.url)
                            .prop('method', options.type);
                        if (options.formData) {
                            $.each(options.formData, function (index, field) {
                                $('<input type="hidden"/>')
                                    .prop('name', field.name)
                                    .val(field.value)
                                    .appendTo(form);
                            });
                        }
                        if (options.fileInput && options.fileInput.length &&
                                options.type === 'POST') {
                            fileInputClones = options.fileInput.clone();
                            // Insert a clone for each file input field:
                            options.fileInput.after(function (index) {
                                return fileInputClones[index];
                            });
                            if (options.paramName) {
                                options.fileInput.each(function (index) {
                                    $(this).prop(
                                        'name',
                                        paramNames[index] || options.paramName
                                    );
                                });
                            }
                            // Appending the file input fields to the hidden form
                            // removes them from their original location:
                            form
                                .append(options.fileInput)
                                .prop('enctype', 'multipart/form-data')
                                // enctype must be set as encoding for IE:
                                .prop('encoding', 'multipart/form-data');
                            // Remove the HTML5 form attribute from the input(s):
                            options.fileInput.removeAttr('form');
                        }
                        form.submit();
                        // Insert the file input fields at their original location
                        // by replacing the clones with the originals:
                        if (fileInputClones && fileInputClones.length) {
                            options.fileInput.each(function (index, input) {
                                var clone = $(fileInputClones[index]);
                                // Restore the original name and form properties:
                                $(input)
                                    .prop('name', clone.prop('name'))
                                    .attr('form', clone.attr('form'));
                                clone.replaceWith(input);
                            });
                        }
                    });
                    form.append(iframe).appendTo(document.body);
                },
                abort: function () {
                    if (iframe) {
                        // javascript:false as iframe src aborts the request
                        // and prevents warning popups on HTTPS in IE6.
                        // concat is used to avoid the "Script URL" JSLint error:
                        iframe
                            .unbind('load')
                            .prop('src', initialIframeSrc);
                    }
                    if (form) {
                        form.remove();
                    }
                }
            };
        }
    });

    // The iframe transport returns the iframe content document as response.
    // The following adds converters from iframe to text, json, html, xml
    // and script.
    // Please note that the Content-Type for JSON responses has to be text/plain
    // or text/html, if the browser doesn't include application/json in the
    // Accept header, else IE will show a download dialog.
    // The Content-Type for XML responses on the other hand has to be always
    // application/xml or text/xml, so IE properly parses the XML response.
    // See also
    // https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation
    $.ajaxSetup({
        converters: {
            'iframe text': function (iframe) {
                return iframe && $(iframe[0].body).text();
            },
            'iframe json': function (iframe) {
                return iframe && jsonAPI[jsonParse]($(iframe[0].body).text());
            },
            'iframe html': function (iframe) {
                return iframe && $(iframe[0].body).html();
            },
            'iframe xml': function (iframe) {
                var xmlDoc = iframe && iframe[0];
                return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc :
                        $.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) ||
                            $(xmlDoc.body).html());
            },
            'iframe script': function (iframe) {
                return iframe && $.globalEval($(iframe[0].body).text());
            }
        }
    });

}));
|
572
lib/cloudinary/static/js/jquery.ui.widget.js
vendored
Normal file
572
lib/cloudinary/static/js/jquery.ui.widget.js
vendored
Normal file
@@ -0,0 +1,572 @@
|
||||
/*! jQuery UI - v1.11.4+CommonJS - 2015-08-28
|
||||
* http://jqueryui.com
|
||||
* Includes: widget.js
|
||||
* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */
|
||||
|
||||
(function( factory ) {
|
||||
if ( typeof define === "function" && define.amd ) {
|
||||
|
||||
// AMD. Register as an anonymous module.
|
||||
define([ "jquery" ], factory );
|
||||
|
||||
} else if ( typeof exports === "object" ) {
|
||||
|
||||
// Node/CommonJS
|
||||
factory( require( "jquery" ) );
|
||||
|
||||
} else {
|
||||
|
||||
// Browser globals
|
||||
factory( jQuery );
|
||||
}
|
||||
}(function( $ ) {
|
||||
/*!
|
||||
* jQuery UI Widget 1.11.4
|
||||
* http://jqueryui.com
|
||||
*
|
||||
* Copyright jQuery Foundation and other contributors
|
||||
* Released under the MIT license.
|
||||
* http://jquery.org/license
|
||||
*
|
||||
* http://api.jqueryui.com/jQuery.widget/
|
||||
*/
|
||||
|
||||
|
||||
// Module-wide counter used to build a unique per-instance event
// namespace, and a cached Array#slice for arguments conversion:
var widget_uuid = 0,
    widget_slice = Array.prototype.slice;
|
||||
|
||||
// Wrap $.cleanData so a "remove" event is triggered on each element
// before its data is cleaned up, giving widget instances a chance to
// destroy themselves when their element leaves the DOM:
$.cleanData = (function( orig ) {
    return function( elems ) {
        var events, elem, i;
        for ( i = 0; (elem = elems[i]) != null; i++ ) {
            try {

                // Only trigger remove when necessary to save time
                events = $._data( elem, "events" );
                if ( events && events.remove ) {
                    $( elem ).triggerHandler( "remove" );
                }

            // http://bugs.jquery.com/ticket/8235
            } catch ( e ) {}
        }
        orig( elems );
    };
})( $.cleanData );
|
||||
|
||||
// Widget factory: given "namespace.name", a base widget and a
// prototype hash, defines the constructor $[namespace][name], builds
// its prototype from the base (wiring _super/_superApply into each
// method), registers a ":namespace-name" selector, re-bases child
// widgets when redefining, and installs the $.fn plugin bridge:
$.widget = function( name, base, prototype ) {
    var fullName, existingConstructor, constructor, basePrototype,
        // proxiedPrototype allows the provided prototype to remain unmodified
        // so that it can be used as a mixin for multiple widgets (#8876)
        proxiedPrototype = {},
        namespace = name.split( "." )[ 0 ];

    name = name.split( "." )[ 1 ];
    fullName = namespace + "-" + name;

    // Called with two args: inherit directly from $.Widget
    if ( !prototype ) {
        prototype = base;
        base = $.Widget;
    }

    // create selector for plugin
    $.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) {
        return !!$.data( elem, fullName );
    };

    $[ namespace ] = $[ namespace ] || {};
    existingConstructor = $[ namespace ][ name ];
    constructor = $[ namespace ][ name ] = function( options, element ) {
        // allow instantiation without "new" keyword
        if ( !this._createWidget ) {
            return new constructor( options, element );
        }

        // allow instantiation without initializing for simple inheritance
        // must use "new" keyword (the code above always passes args)
        if ( arguments.length ) {
            this._createWidget( options, element );
        }
    };
    // extend with the existing constructor to carry over any static properties
    $.extend( constructor, existingConstructor, {
        version: prototype.version,
        // copy the object used to create the prototype in case we need to
        // redefine the widget later
        _proto: $.extend( {}, prototype ),
        // track widgets that inherit from this widget in case this widget is
        // redefined after a widget inherits from it
        _childConstructors: []
    });

    basePrototype = new base();
    // we need to make the options hash a property directly on the new instance
    // otherwise we'll modify the options hash on the prototype that we're
    // inheriting from
    basePrototype.options = $.widget.extend( {}, basePrototype.options );
    $.each( prototype, function( prop, value ) {
        if ( !$.isFunction( value ) ) {
            proxiedPrototype[ prop ] = value;
            return;
        }
        // Wrap each method so this._super / this._superApply refer to
        // the base implementation for the duration of the call:
        proxiedPrototype[ prop ] = (function() {
            var _super = function() {
                    return base.prototype[ prop ].apply( this, arguments );
                },
                _superApply = function( args ) {
                    return base.prototype[ prop ].apply( this, args );
                };
            return function() {
                var __super = this._super,
                    __superApply = this._superApply,
                    returnValue;

                this._super = _super;
                this._superApply = _superApply;

                returnValue = value.apply( this, arguments );

                this._super = __super;
                this._superApply = __superApply;

                return returnValue;
            };
        })();
    });
    constructor.prototype = $.widget.extend( basePrototype, {
        // TODO: remove support for widgetEventPrefix
        // always use the name + a colon as the prefix, e.g., draggable:start
        // don't prefix for widgets that aren't DOM-based
        widgetEventPrefix: existingConstructor ? (basePrototype.widgetEventPrefix || name) : name
    }, proxiedPrototype, {
        constructor: constructor,
        namespace: namespace,
        widgetName: name,
        widgetFullName: fullName
    });

    // If this widget is being redefined then we need to find all widgets that
    // are inheriting from it and redefine all of them so that they inherit from
    // the new version of this widget. We're essentially trying to replace one
    // level in the prototype chain.
    if ( existingConstructor ) {
        $.each( existingConstructor._childConstructors, function( i, child ) {
            var childPrototype = child.prototype;

            // redefine the child widget using the same prototype that was
            // originally used, but inherit from the new version of the base
            $.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, child._proto );
        });
        // remove the list of existing child constructors from the old constructor
        // so the old child constructors can be garbage collected
        delete existingConstructor._childConstructors;
    } else {
        base._childConstructors.push( constructor );
    }

    $.widget.bridge( name, constructor );

    return constructor;
};
|
||||
|
||||
// Deep-extend helper for widget option hashes: later sources win,
// plain objects are cloned recursively (never shared by reference),
// all other values are copied by reference, and undefined values as
// well as inherited keys are skipped:
$.widget.extend = function( target ) {
    var sources = widget_slice.call( arguments, 1 ),
        i,
        key,
        value;
    for ( i = 0; i < sources.length; i++ ) {
        for ( key in sources[ i ] ) {
            value = sources[ i ][ key ];
            if ( !sources[ i ].hasOwnProperty( key ) || value === undefined ) {
                continue;
            }
            if ( $.isPlainObject( value ) ) {
                // Clone plain objects; don't extend strings, arrays,
                // etc. with objects
                target[ key ] = $.isPlainObject( target[ key ] ) ?
                    $.widget.extend( {}, target[ key ], value ) :
                    $.widget.extend( {}, value );
            } else {
                // Copy everything else by reference
                target[ key ] = value;
            }
        }
    }
    return target;
};
|
||||
|
||||
// Installs the $.fn[ name ] plugin method: string arguments are
// routed to widget instance methods ("instance" returns the instance,
// "_"-prefixed and unknown methods raise $.error), while object
// arguments set options on existing instances or construct new ones:
$.widget.bridge = function( name, object ) {
    var fullName = object.prototype.widgetFullName || name;
    $.fn[ name ] = function( options ) {
        var isMethodCall = typeof options === "string",
            args = widget_slice.call( arguments, 1 ),
            returnValue = this;

        if ( isMethodCall ) {
            this.each(function() {
                var methodValue,
                    instance = $.data( this, fullName );
                if ( options === "instance" ) {
                    returnValue = instance;
                    return false;
                }
                if ( !instance ) {
                    return $.error( "cannot call methods on " + name + " prior to initialization; " +
                        "attempted to call method '" + options + "'" );
                }
                if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) {
                    return $.error( "no such method '" + options + "' for " + name + " widget instance" );
                }
                methodValue = instance[ options ].apply( instance, args );
                // A non-undefined return value other than the instance
                // itself becomes the plugin call's return value:
                if ( methodValue !== instance && methodValue !== undefined ) {
                    returnValue = methodValue && methodValue.jquery ?
                        returnValue.pushStack( methodValue.get() ) :
                        methodValue;
                    return false;
                }
            });
        } else {

            // Allow multiple hashes to be passed on init
            if ( args.length ) {
                options = $.widget.extend.apply( null, [ options ].concat(args) );
            }

            this.each(function() {
                var instance = $.data( this, fullName );
                if ( instance ) {
                    instance.option( options || {} );
                    if ( instance._init ) {
                        instance._init();
                    }
                } else {
                    $.data( this, fullName, new object( options, this ) );
                }
            });
        }

        return returnValue;
    };
};
|
||||
|
||||
// Base widget "class"; the constructor body is empty because all
// initialization happens in _createWidget (see the factory above):
$.Widget = function( /* options, element */ ) {};
$.Widget._childConstructors = [];
|
||||
|
||||
$.Widget.prototype = {
|
||||
// Default prototype properties shared by all widgets:
widgetName: "widget",
widgetEventPrefix: "",
defaultElement: "<div>",
options: {
    disabled: false,

    // callbacks
    create: null
},
|
||||
// Shared initialization for every widget instance: resolves the
// target element, stores the instance via $.data, binds
// self-destruction on DOM removal, merges options, then runs the
// _create / create-event / _init lifecycle:
_createWidget: function( options, element ) {
    element = $( element || this.defaultElement || this )[ 0 ];
    this.element = $( element );
    this.uuid = widget_uuid++;
    this.eventNamespace = "." + this.widgetName + this.uuid;

    this.bindings = $();
    this.hoverable = $();
    this.focusable = $();

    if ( element !== this ) {
        $.data( element, this.widgetFullName, this );
        this._on( true, this.element, {
            remove: function( event ) {
                if ( event.target === element ) {
                    this.destroy();
                }
            }
        });
        this.document = $( element.style ?
            // element within the document
            element.ownerDocument :
            // element is window or document
            element.document || element );
        this.window = $( this.document[0].defaultView || this.document[0].parentWindow );
    }

    this.options = $.widget.extend( {},
        this.options,
        this._getCreateOptions(),
        options );

    this._create();
    this._trigger( "create", null, this._getCreateEventData() );
    this._init();
},
|
||||
// Lifecycle extension points; intentionally no-ops in the base widget:
_getCreateOptions: $.noop,
_getCreateEventData: $.noop,
_create: $.noop,
_init: $.noop,
|
||||
|
||||
// Removes the widget instance: delegates custom cleanup to
// _destroy(), then unbinds events and removes data and state classes:
destroy: function() {
    this._destroy();
    // we can probably remove the unbind calls in 2.0
    // all event bindings should go through this._on()
    this.element
        .unbind( this.eventNamespace )
        .removeData( this.widgetFullName )
        // support: jquery <1.6.3
        // http://bugs.jquery.com/ticket/9413
        .removeData( $.camelCase( this.widgetFullName ) );
    this.widget()
        .unbind( this.eventNamespace )
        .removeAttr( "aria-disabled" )
        .removeClass(
            this.widgetFullName + "-disabled " +
            "ui-state-disabled" );

    // clean up events and states
    this.bindings.unbind( this.eventNamespace );
    this.hoverable.removeClass( "ui-state-hover" );
    this.focusable.removeClass( "ui-state-focus" );
},
|
||||
_destroy: $.noop,
|
||||
|
||||
widget: function() {
|
||||
return this.element;
|
||||
},
|
||||
|
||||
option: function( key, value ) {
|
||||
var options = key,
|
||||
parts,
|
||||
curOption,
|
||||
i;
|
||||
|
||||
if ( arguments.length === 0 ) {
|
||||
// don't return a reference to the internal hash
|
||||
return $.widget.extend( {}, this.options );
|
||||
}
|
||||
|
||||
if ( typeof key === "string" ) {
|
||||
// handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } }
|
||||
options = {};
|
||||
parts = key.split( "." );
|
||||
key = parts.shift();
|
||||
if ( parts.length ) {
|
||||
curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] );
|
||||
for ( i = 0; i < parts.length - 1; i++ ) {
|
||||
curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {};
|
||||
curOption = curOption[ parts[ i ] ];
|
||||
}
|
||||
key = parts.pop();
|
||||
if ( arguments.length === 1 ) {
|
||||
return curOption[ key ] === undefined ? null : curOption[ key ];
|
||||
}
|
||||
curOption[ key ] = value;
|
||||
} else {
|
||||
if ( arguments.length === 1 ) {
|
||||
return this.options[ key ] === undefined ? null : this.options[ key ];
|
||||
}
|
||||
options[ key ] = value;
|
||||
}
|
||||
}
|
||||
|
||||
this._setOptions( options );
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOptions: function( options ) {
|
||||
var key;
|
||||
|
||||
for ( key in options ) {
|
||||
this._setOption( key, options[ key ] );
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
_setOption: function( key, value ) {
|
||||
this.options[ key ] = value;
|
||||
|
||||
if ( key === "disabled" ) {
|
||||
this.widget()
|
||||
.toggleClass( this.widgetFullName + "-disabled", !!value );
|
||||
|
||||
// If the widget is becoming disabled, then nothing is interactive
|
||||
if ( value ) {
|
||||
this.hoverable.removeClass( "ui-state-hover" );
|
||||
this.focusable.removeClass( "ui-state-focus" );
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
enable: function() {
|
||||
return this._setOptions({ disabled: false });
|
||||
},
|
||||
disable: function() {
|
||||
return this._setOptions({ disabled: true });
|
||||
},
|
||||
|
||||
_on: function( suppressDisabledCheck, element, handlers ) {
|
||||
var delegateElement,
|
||||
instance = this;
|
||||
|
||||
// no suppressDisabledCheck flag, shuffle arguments
|
||||
if ( typeof suppressDisabledCheck !== "boolean" ) {
|
||||
handlers = element;
|
||||
element = suppressDisabledCheck;
|
||||
suppressDisabledCheck = false;
|
||||
}
|
||||
|
||||
// no element argument, shuffle and use this.element
|
||||
if ( !handlers ) {
|
||||
handlers = element;
|
||||
element = this.element;
|
||||
delegateElement = this.widget();
|
||||
} else {
|
||||
element = delegateElement = $( element );
|
||||
this.bindings = this.bindings.add( element );
|
||||
}
|
||||
|
||||
$.each( handlers, function( event, handler ) {
|
||||
function handlerProxy() {
|
||||
// allow widgets to customize the disabled handling
|
||||
// - disabled as an array instead of boolean
|
||||
// - disabled class as method for disabling individual parts
|
||||
if ( !suppressDisabledCheck &&
|
||||
( instance.options.disabled === true ||
|
||||
$( this ).hasClass( "ui-state-disabled" ) ) ) {
|
||||
return;
|
||||
}
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
|
||||
// copy the guid so direct unbinding works
|
||||
if ( typeof handler !== "string" ) {
|
||||
handlerProxy.guid = handler.guid =
|
||||
handler.guid || handlerProxy.guid || $.guid++;
|
||||
}
|
||||
|
||||
var match = event.match( /^([\w:-]*)\s*(.*)$/ ),
|
||||
eventName = match[1] + instance.eventNamespace,
|
||||
selector = match[2];
|
||||
if ( selector ) {
|
||||
delegateElement.delegate( selector, eventName, handlerProxy );
|
||||
} else {
|
||||
element.bind( eventName, handlerProxy );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_off: function( element, eventName ) {
|
||||
eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) +
|
||||
this.eventNamespace;
|
||||
element.unbind( eventName ).undelegate( eventName );
|
||||
|
||||
// Clear the stack to avoid memory leaks (#10056)
|
||||
this.bindings = $( this.bindings.not( element ).get() );
|
||||
this.focusable = $( this.focusable.not( element ).get() );
|
||||
this.hoverable = $( this.hoverable.not( element ).get() );
|
||||
},
|
||||
|
||||
_delay: function( handler, delay ) {
|
||||
function handlerProxy() {
|
||||
return ( typeof handler === "string" ? instance[ handler ] : handler )
|
||||
.apply( instance, arguments );
|
||||
}
|
||||
var instance = this;
|
||||
return setTimeout( handlerProxy, delay || 0 );
|
||||
},
|
||||
|
||||
_hoverable: function( element ) {
|
||||
this.hoverable = this.hoverable.add( element );
|
||||
this._on( element, {
|
||||
mouseenter: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-hover" );
|
||||
},
|
||||
mouseleave: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-hover" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_focusable: function( element ) {
|
||||
this.focusable = this.focusable.add( element );
|
||||
this._on( element, {
|
||||
focusin: function( event ) {
|
||||
$( event.currentTarget ).addClass( "ui-state-focus" );
|
||||
},
|
||||
focusout: function( event ) {
|
||||
$( event.currentTarget ).removeClass( "ui-state-focus" );
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
_trigger: function( type, event, data ) {
|
||||
var prop, orig,
|
||||
callback = this.options[ type ];
|
||||
|
||||
data = data || {};
|
||||
event = $.Event( event );
|
||||
event.type = ( type === this.widgetEventPrefix ?
|
||||
type :
|
||||
this.widgetEventPrefix + type ).toLowerCase();
|
||||
// the original event may come from any element
|
||||
// so we need to reset the target on the new event
|
||||
event.target = this.element[ 0 ];
|
||||
|
||||
// copy original event properties over to the new event
|
||||
orig = event.originalEvent;
|
||||
if ( orig ) {
|
||||
for ( prop in orig ) {
|
||||
if ( !( prop in event ) ) {
|
||||
event[ prop ] = orig[ prop ];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.element.trigger( event, data );
|
||||
return !( $.isFunction( callback ) &&
|
||||
callback.apply( this.element[0], [ event ].concat( data ) ) === false ||
|
||||
event.isDefaultPrevented() );
|
||||
}
|
||||
};
|
||||
|
||||
$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) {
|
||||
$.Widget.prototype[ "_" + method ] = function( element, options, callback ) {
|
||||
if ( typeof options === "string" ) {
|
||||
options = { effect: options };
|
||||
}
|
||||
var hasOptions,
|
||||
effectName = !options ?
|
||||
method :
|
||||
options === true || typeof options === "number" ?
|
||||
defaultEffect :
|
||||
options.effect || defaultEffect;
|
||||
options = options || {};
|
||||
if ( typeof options === "number" ) {
|
||||
options = { duration: options };
|
||||
}
|
||||
hasOptions = !$.isEmptyObject( options );
|
||||
options.complete = callback;
|
||||
if ( options.delay ) {
|
||||
element.delay( options.delay );
|
||||
}
|
||||
if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) {
|
||||
element[ method ]( options );
|
||||
} else if ( effectName !== method && element[ effectName ] ) {
|
||||
element[ effectName ]( options.duration, options.easing, callback );
|
||||
} else {
|
||||
element.queue(function( next ) {
|
||||
$( this )[ method ]();
|
||||
if ( callback ) {
|
||||
callback.call( element[ 0 ] );
|
||||
}
|
||||
next();
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
var widget = $.widget;
|
||||
|
||||
|
||||
|
||||
}));
|
2
lib/cloudinary/static/js/load-image.all.min.js
vendored
Normal file
2
lib/cloudinary/static/js/load-image.all.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
12
lib/cloudinary/templates/cloudinary_direct_upload.html
Normal file
12
lib/cloudinary/templates/cloudinary_direct_upload.html
Normal file
@@ -0,0 +1,12 @@
|
||||
<form action={{url}} method="POST" enctype="multipart/form-data">
|
||||
{% for name, value in params.items %}
|
||||
<input type="hidden" name="{{name}}" value="{{value}}"/>
|
||||
{% endfor %}
|
||||
{% block extra %} {% endblock %}
|
||||
{% block file %}
|
||||
<input type="file" name="file"/>
|
||||
{% endblock %}
|
||||
{% block submit %}
|
||||
<input type="submit"/>
|
||||
{% endblock %}
|
||||
</form>
|
14
lib/cloudinary/templates/cloudinary_includes.html
Normal file
14
lib/cloudinary/templates/cloudinary_includes.html
Normal file
@@ -0,0 +1,14 @@
|
||||
{% load staticfiles %}
|
||||
|
||||
<script src="{% static "js/jquery.ui.widget.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.iframe-transport.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.cloudinary.js" %}" type="text/javascript"></script>
|
||||
|
||||
{% if processing %}
|
||||
<script src="{% static "js/load-image.all.min.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/canvas-to-blob.min.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-process.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-image.js" %}" type="text/javascript"></script>
|
||||
<script src="{% static "js/jquery.fileupload-validate.js" %}" type="text/javascript"></script>
|
||||
{% endif %}
|
3
lib/cloudinary/templates/cloudinary_js_config.html
Normal file
3
lib/cloudinary/templates/cloudinary_js_config.html
Normal file
@@ -0,0 +1,3 @@
|
||||
<script type='text/javascript'>
|
||||
$.cloudinary.config({{ params|safe }});
|
||||
</script>
|
1
lib/cloudinary/templatetags/__init__.py
Normal file
1
lib/cloudinary/templatetags/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
#
|
85
lib/cloudinary/templatetags/cloudinary.py
Normal file
85
lib/cloudinary/templatetags/cloudinary.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
|
||||
from django import template
|
||||
from django.forms import Form
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import CloudinaryResource, utils, uploader
|
||||
from cloudinary.forms import CloudinaryJsFileField, cl_init_js_callbacks
|
||||
from cloudinary.compat import PY3
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.simple_tag(takes_context=True)
|
||||
def cloudinary_url(context, source, options_dict=None, **options):
|
||||
if options_dict is None:
|
||||
options = dict(**options)
|
||||
else:
|
||||
options = dict(options_dict, **options)
|
||||
try:
|
||||
if context['request'].is_secure() and 'secure' not in options:
|
||||
options['secure'] = True
|
||||
except KeyError:
|
||||
pass
|
||||
if not isinstance(source, CloudinaryResource):
|
||||
source = CloudinaryResource(source)
|
||||
return source.build_url(**options)
|
||||
|
||||
|
||||
@register.simple_tag(name='cloudinary', takes_context=True)
|
||||
def cloudinary_tag(context, image, options_dict=None, **options):
|
||||
if options_dict is None:
|
||||
options = dict(**options)
|
||||
else:
|
||||
options = dict(options_dict, **options)
|
||||
try:
|
||||
if context['request'].is_secure() and 'secure' not in options:
|
||||
options['secure'] = True
|
||||
except KeyError:
|
||||
pass
|
||||
if not isinstance(image, CloudinaryResource):
|
||||
image = CloudinaryResource(image)
|
||||
return mark_safe(image.image(**options))
|
||||
|
||||
|
||||
@register.simple_tag
|
||||
def cloudinary_direct_upload_field(field_name="image", request=None):
|
||||
form = type("OnTheFlyForm", (Form,), {field_name: CloudinaryJsFileField()})()
|
||||
if request:
|
||||
cl_init_js_callbacks(form, request)
|
||||
value = form[field_name]
|
||||
if not PY3:
|
||||
value = unicode(value)
|
||||
return value
|
||||
|
||||
|
||||
"""Deprecated - please use cloudinary_direct_upload_field, or a proper form"""
|
||||
@register.inclusion_tag('cloudinary_direct_upload.html')
|
||||
def cloudinary_direct_upload(callback_url, **options):
|
||||
params = utils.build_upload_params(callback=callback_url, **options)
|
||||
params = utils.sign_request(params, options)
|
||||
|
||||
api_url = utils.cloudinary_api_url("upload", resource_type=options.get("resource_type", "image"),
|
||||
upload_prefix=options.get("upload_prefix"))
|
||||
|
||||
return {"params": params, "url": api_url}
|
||||
|
||||
|
||||
@register.inclusion_tag('cloudinary_includes.html')
|
||||
def cloudinary_includes(processing=False):
|
||||
return {"processing": processing}
|
||||
|
||||
|
||||
CLOUDINARY_JS_CONFIG_PARAMS = ("api_key", "cloud_name", "private_cdn", "secure_distribution", "cdn_subdomain")
|
||||
@register.inclusion_tag('cloudinary_js_config.html')
|
||||
def cloudinary_js_config():
|
||||
config = cloudinary.config()
|
||||
return dict(
|
||||
params=json.dumps(dict(
|
||||
(param, getattr(config, param)) for param in CLOUDINARY_JS_CONFIG_PARAMS if getattr(config, param, None)
|
||||
))
|
||||
)
|
325
lib/cloudinary/uploader.py
Normal file
325
lib/cloudinary/uploader.py
Normal file
@@ -0,0 +1,325 @@
|
||||
# Copyright Cloudinary
|
||||
import json
|
||||
import re
|
||||
import socket
|
||||
from os.path import getsize
|
||||
|
||||
import cloudinary
|
||||
import urllib3
|
||||
import certifi
|
||||
from cloudinary import utils
|
||||
from cloudinary.api import Error
|
||||
from cloudinary.compat import string_types
|
||||
from urllib3.exceptions import HTTPError
|
||||
from urllib3 import PoolManager
|
||||
|
||||
try:
|
||||
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
|
||||
except Exception:
|
||||
def is_appengine_sandbox():
|
||||
return False
|
||||
|
||||
try: # Python 2.7+
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from urllib3.packages.ordered_dict import OrderedDict
|
||||
|
||||
if is_appengine_sandbox():
|
||||
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
|
||||
_http = AppEngineManager()
|
||||
else:
|
||||
# PoolManager uses a socket-level API behind the scenes
|
||||
_http = PoolManager(
|
||||
cert_reqs='CERT_REQUIRED',
|
||||
ca_certs=certifi.where()
|
||||
)
|
||||
|
||||
|
||||
def upload(file, **options):
|
||||
params = utils.build_upload_params(**options)
|
||||
return call_api("upload", params, file=file, **options)
|
||||
|
||||
|
||||
def unsigned_upload(file, upload_preset, **options):
|
||||
return upload(file, upload_preset=upload_preset, unsigned=True, **options)
|
||||
|
||||
|
||||
def upload_image(file, **options):
|
||||
result = upload(file, **options)
|
||||
return cloudinary.CloudinaryImage(
|
||||
result["public_id"], version=str(result["version"]),
|
||||
format=result.get("format"), metadata=result)
|
||||
|
||||
|
||||
def upload_resource(file, **options):
|
||||
result = upload(file, **options)
|
||||
return cloudinary.CloudinaryResource(
|
||||
result["public_id"], version=str(result["version"]),
|
||||
format=result.get("format"), type=result["type"], resource_type=result["resource_type"], metadata=result)
|
||||
|
||||
|
||||
def upload_large(file, **options):
|
||||
""" Upload large files. """
|
||||
upload_id = utils.random_public_id()
|
||||
with open(file, 'rb') as file_io:
|
||||
results = None
|
||||
current_loc = 0
|
||||
chunk_size = options.get("chunk_size", 20000000)
|
||||
file_size = getsize(file)
|
||||
chunk = file_io.read(chunk_size)
|
||||
while chunk:
|
||||
range = "bytes {0}-{1}/{2}".format(current_loc, current_loc + len(chunk) - 1, file_size)
|
||||
current_loc += len(chunk)
|
||||
|
||||
results = upload_large_part((file, chunk),
|
||||
http_headers={"Content-Range": range, "X-Unique-Upload-Id": upload_id},
|
||||
**options)
|
||||
options["public_id"] = results.get("public_id")
|
||||
chunk = file_io.read(chunk_size)
|
||||
return results
|
||||
|
||||
|
||||
def upload_large_part(file, **options):
|
||||
""" Upload large files. """
|
||||
params = utils.build_upload_params(**options)
|
||||
if 'resource_type' not in options: options['resource_type'] = "raw"
|
||||
return call_api("upload", params, file=file, **options)
|
||||
|
||||
|
||||
def destroy(public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"type": options.get("type"),
|
||||
"invalidate": options.get("invalidate"),
|
||||
"public_id": public_id
|
||||
}
|
||||
return call_api("destroy", params, **options)
|
||||
|
||||
|
||||
def rename(from_public_id, to_public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"type": options.get("type"),
|
||||
"overwrite": options.get("overwrite"),
|
||||
"invalidate": options.get("invalidate"),
|
||||
"from_public_id": from_public_id,
|
||||
"to_public_id": to_public_id
|
||||
}
|
||||
return call_api("rename", params, **options)
|
||||
|
||||
|
||||
def explicit(public_id, **options):
|
||||
params = utils.build_upload_params(**options)
|
||||
params["public_id"] = public_id
|
||||
return call_api("explicit", params, **options)
|
||||
|
||||
|
||||
def create_archive(**options):
|
||||
params = utils.archive_params(**options)
|
||||
if options.get("target_format") is not None:
|
||||
params["target_format"] = options.get("target_format")
|
||||
return call_api("generate_archive", params, **options)
|
||||
|
||||
|
||||
def create_zip(**options):
|
||||
return create_archive(target_format="zip", **options)
|
||||
|
||||
|
||||
def generate_sprite(tag, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"async": options.get("async"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(fetch_format=options.get("format"), **options)[0]
|
||||
}
|
||||
return call_api("sprite", params, **options)
|
||||
|
||||
|
||||
def multi(tag, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"format": options.get("format"),
|
||||
"async": options.get("async"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(**options)[0]
|
||||
}
|
||||
return call_api("multi", params, **options)
|
||||
|
||||
|
||||
def explode(public_id, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"public_id": public_id,
|
||||
"format": options.get("format"),
|
||||
"notification_url": options.get("notification_url"),
|
||||
"transformation": utils.generate_transformation_string(**options)[0]
|
||||
}
|
||||
return call_api("explode", params, **options)
|
||||
|
||||
|
||||
# options may include 'exclusive' (boolean) which causes clearing this tag from all other resources
|
||||
def add_tag(tag, public_ids=None, **options):
|
||||
exclusive = options.pop("exclusive", None)
|
||||
command = "set_exclusive" if exclusive else "add"
|
||||
return call_tags_api(tag, command, public_ids, **options)
|
||||
|
||||
|
||||
def remove_tag(tag, public_ids=None, **options):
|
||||
return call_tags_api(tag, "remove", public_ids, **options)
|
||||
|
||||
|
||||
def replace_tag(tag, public_ids=None, **options):
|
||||
return call_tags_api(tag, "replace", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_tags(public_ids, **options):
|
||||
"""
|
||||
Remove all tags from the specified public IDs.
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_tags_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def add_context(context, public_ids, **options):
|
||||
"""
|
||||
Add a context keys and values. If a particular key already exists, the value associated with the key is updated.
|
||||
:param context: dictionary of context
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_context_api(context, "add", public_ids, **options)
|
||||
|
||||
|
||||
def remove_all_context(public_ids, **options):
|
||||
"""
|
||||
Remove all custom context from the specified public IDs.
|
||||
:param public_ids: the public IDs of the resources to update
|
||||
:param options: additional options passed to the request
|
||||
:return: dictionary with a list of public IDs that were updated
|
||||
"""
|
||||
return call_context_api(None, "remove_all", public_ids, **options)
|
||||
|
||||
|
||||
def call_tags_api(tag, command, public_ids=None, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"tag": tag,
|
||||
"public_ids": utils.build_array(public_ids),
|
||||
"command": command,
|
||||
"type": options.get("type")
|
||||
}
|
||||
return call_api("tags", params, **options)
|
||||
|
||||
|
||||
def call_context_api(context, command, public_ids=None, **options):
|
||||
params = {
|
||||
"timestamp": utils.now(),
|
||||
"context": utils.encode_context(context),
|
||||
"public_ids": utils.build_array(public_ids),
|
||||
"command": command,
|
||||
"type": options.get("type")
|
||||
}
|
||||
return call_api("context", params, **options)
|
||||
|
||||
|
||||
TEXT_PARAMS = ["public_id",
|
||||
"font_family",
|
||||
"font_size",
|
||||
"font_color",
|
||||
"text_align",
|
||||
"font_weight",
|
||||
"font_style",
|
||||
"background",
|
||||
"opacity",
|
||||
"text_decoration"
|
||||
]
|
||||
|
||||
|
||||
def text(text, **options):
|
||||
params = {"timestamp": utils.now(), "text": text}
|
||||
for key in TEXT_PARAMS:
|
||||
params[key] = options.get(key)
|
||||
return call_api("text", params, **options)
|
||||
|
||||
|
||||
def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, **options):
|
||||
if http_headers is None:
|
||||
http_headers = {}
|
||||
file_io = None
|
||||
try:
|
||||
if unsigned:
|
||||
params = utils.cleanup_params(params)
|
||||
else:
|
||||
params = utils.sign_request(params, options)
|
||||
|
||||
param_list = OrderedDict()
|
||||
for k, v in params.items():
|
||||
if isinstance(v, list):
|
||||
for i in range(len(v)):
|
||||
param_list["{0}[{1}]".format(k, i)] = v[i]
|
||||
elif v:
|
||||
param_list[k] = v
|
||||
|
||||
api_url = utils.cloudinary_api_url(action, **options)
|
||||
if file:
|
||||
if isinstance(file, string_types):
|
||||
if re.match(r'ftp:|https?:|s3:|data:[^;]*;base64,([a-zA-Z0-9\/+\n=]+)$', file):
|
||||
# URL
|
||||
name = None
|
||||
data = file
|
||||
else:
|
||||
# file path
|
||||
name = file
|
||||
with open(file, "rb") as opened:
|
||||
data = opened.read()
|
||||
elif hasattr(file, 'read') and callable(file.read):
|
||||
# stream
|
||||
data = file.read()
|
||||
name = file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream"
|
||||
elif isinstance(file, tuple):
|
||||
name = None
|
||||
data = file
|
||||
else:
|
||||
# Not a string, not a stream
|
||||
name = "file"
|
||||
data = file
|
||||
|
||||
param_list["file"] = (name, data) if name else data
|
||||
|
||||
headers = {"User-Agent": cloudinary.get_user_agent()}
|
||||
headers.update(http_headers)
|
||||
|
||||
kw = {}
|
||||
if timeout is not None:
|
||||
kw['timeout'] = timeout
|
||||
|
||||
code = 200
|
||||
try:
|
||||
response = _http.request("POST", api_url, param_list, headers, **kw)
|
||||
except HTTPError as e:
|
||||
raise Error("Unexpected error - {0!r}".format(e))
|
||||
except socket.error as e:
|
||||
raise Error("Socket error: {0!r}".format(e))
|
||||
|
||||
try:
|
||||
result = json.loads(response.data.decode('utf-8'))
|
||||
except Exception as e:
|
||||
# Error is parsing json
|
||||
raise Error("Error parsing server response (%d) - %s. Got - %s", response.status, response, e)
|
||||
|
||||
if "error" in result:
|
||||
if response.status not in [200, 400, 401, 403, 404, 500]:
|
||||
code = response.status
|
||||
if return_error:
|
||||
result["error"]["http_code"] = code
|
||||
else:
|
||||
raise Error(result["error"]["message"])
|
||||
|
||||
return result
|
||||
finally:
|
||||
if file_io: file_io.close()
|
912
lib/cloudinary/utils.py
Normal file
912
lib/cloudinary/utils.py
Normal file
@@ -0,0 +1,912 @@
|
||||
# Copyright Cloudinary
|
||||
import base64
|
||||
import copy
|
||||
import hashlib
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import string
|
||||
import struct
|
||||
import time
|
||||
import zlib
|
||||
from collections import OrderedDict
|
||||
from datetime import datetime, date
|
||||
from fractions import Fraction
|
||||
|
||||
import six.moves.urllib.parse
|
||||
from six import iteritems
|
||||
|
||||
import cloudinary
|
||||
from cloudinary import auth_token
|
||||
from cloudinary.compat import PY3, to_bytes, to_bytearray, to_string, string_types, urlparse
|
||||
|
||||
VAR_NAME_RE = r'(\$\([a-zA-Z]\w+\))'
|
||||
|
||||
urlencode = six.moves.urllib.parse.urlencode
|
||||
unquote = six.moves.urllib.parse.unquote
|
||||
|
||||
""" @deprecated: use cloudinary.SHARED_CDN """
|
||||
SHARED_CDN = "res.cloudinary.com"
|
||||
|
||||
DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION = {"width": "auto", "crop": "limit"}
|
||||
|
||||
RANGE_VALUE_RE = r'^(?P<value>(\d+\.)?\d+)(?P<modifier>[%pP])?$'
|
||||
RANGE_RE = r'^(\d+\.)?\d+[%pP]?\.\.(\d+\.)?\d+[%pP]?$'
|
||||
FLOAT_RE = r'^(\d+)\.(\d+)?$'
|
||||
__LAYER_KEYWORD_PARAMS = [("font_weight", "normal"),
|
||||
("font_style", "normal"),
|
||||
("text_decoration", "none"),
|
||||
("text_align", None),
|
||||
("stroke", "none")]
|
||||
|
||||
|
||||
def build_array(arg):
|
||||
if isinstance(arg, list):
|
||||
return arg
|
||||
elif arg is None:
|
||||
return []
|
||||
else:
|
||||
return [arg]
|
||||
|
||||
|
||||
def build_list_of_dicts(val):
|
||||
"""
|
||||
Converts a value that can be presented as a list of dict.
|
||||
|
||||
In case top level item is not a list, it is wrapped with a list
|
||||
|
||||
Valid values examples:
|
||||
- Valid dict: {"k": "v", "k2","v2"}
|
||||
- List of dict: [{"k": "v"}, {"k2","v2"}]
|
||||
- JSON decodable string: '{"k": "v"}', or '[{"k": "v"}]'
|
||||
- List of JSON decodable strings: ['{"k": "v"}', '{"k2","v2"}']
|
||||
|
||||
Invalid values examples:
|
||||
- ["not", "a", "dict"]
|
||||
- [123, None],
|
||||
- [["another", "list"]]
|
||||
|
||||
:param val: Input value
|
||||
:type val: Union[list, dict, str]
|
||||
|
||||
:return: Converted(or original) list of dict
|
||||
:raises: ValueError in case value cannot be converted to a list of dict
|
||||
"""
|
||||
if val is None:
|
||||
return []
|
||||
|
||||
if isinstance(val, str):
|
||||
# use OrderedDict to preserve order
|
||||
val = json.loads(val, object_pairs_hook=OrderedDict)
|
||||
|
||||
if isinstance(val, dict):
|
||||
val = [val]
|
||||
|
||||
for index, item in enumerate(val):
|
||||
if isinstance(item, str):
|
||||
# use OrderedDict to preserve order
|
||||
val[index] = json.loads(item, object_pairs_hook=OrderedDict)
|
||||
if not isinstance(val[index], dict):
|
||||
raise ValueError("Expected a list of dicts")
|
||||
return val
|
||||
|
||||
|
||||
def encode_double_array(array):
|
||||
array = build_array(array)
|
||||
if len(array) > 0 and isinstance(array[0], list):
|
||||
return "|".join([",".join([str(i) for i in build_array(inner)]) for inner in array])
|
||||
else:
|
||||
return ",".join([str(i) for i in array])
|
||||
|
||||
|
||||
def encode_dict(arg):
|
||||
if isinstance(arg, dict):
|
||||
if PY3:
|
||||
items = arg.items()
|
||||
else:
|
||||
items = arg.iteritems()
|
||||
return "|".join((k + "=" + v) for k, v in items)
|
||||
else:
|
||||
return arg
|
||||
|
||||
|
||||
def encode_context(context):
|
||||
"""
|
||||
:param context: dict of context to be encoded
|
||||
:return: a joined string of all keys and values properly escaped and separated by a pipe character
|
||||
"""
|
||||
|
||||
if not isinstance(context, dict):
|
||||
return context
|
||||
|
||||
return "|".join(("{}={}".format(k, v.replace("=", "\\=").replace("|", "\\|"))) for k, v in iteritems(context))
|
||||
|
||||
|
||||
def json_encode(value):
|
||||
"""
|
||||
Converts value to a json encoded string
|
||||
|
||||
:param value: value to be encoded
|
||||
|
||||
:return: JSON encoded string
|
||||
"""
|
||||
return json.dumps(value, default=__json_serializer, separators=(',', ':'))
|
||||
|
||||
|
||||
def generate_transformation_string(**options):
    """Serialize transformation options into a Cloudinary URL component.

    Transformation-related keys are popped from ``options``; whatever
    remains is returned for the caller to use (e.g. as HTML attributes).

    :param options: transformation options (width, height, crop, effect, ...)
    :return: tuple ``(transformation_url_part, remaining_options)``
    """
    responsive_width = options.pop("responsive_width", cloudinary.config().responsive_width)
    size = options.pop("size", None)
    if size:
        # "size" is a "<width>x<height>" shorthand.
        options["width"], options["height"] = size.split("x")
    width = options.get("width")
    height = options.get("height")
    has_layer = ("underlay" in options) or ("overlay" in options)

    crop = options.pop("crop", None)
    angle = ".".join([str(value) for value in build_array(options.pop("angle", None))])
    # When any of these apply, the delivered size may differ from the
    # requested one, so width/height must not remain as HTML attributes.
    no_html_sizes = has_layer or angle or crop == "fit" or crop == "limit" or responsive_width

    # Strip width/height from the returned options when they are dynamic
    # ("auto", "ow"/"oh", fractional) or when no_html_sizes applies.
    if width and (str(width).startswith("auto") or str(width) == "ow" or is_fraction(width) or no_html_sizes):
        del options["width"]
    if height and (str(height) == "oh" or is_fraction(height) or no_html_sizes):
        del options["height"]

    # "#rrggbb" colors are written as "rgb:rrggbb" in URLs.
    background = options.pop("background", None)
    if background:
        background = background.replace("#", "rgb:")
    color = options.pop("color", None)
    if color:
        color = color.replace("#", "rgb:")

    base_transformations = build_array(options.pop("transformation", None))
    if any(isinstance(bs, dict) for bs in base_transformations):
        # Chained transformations: serialize each entry recursively.
        def recurse(bs):
            if isinstance(bs, dict):
                return generate_transformation_string(**bs)[0]
            else:
                return generate_transformation_string(transformation=bs)[0]
        base_transformations = list(map(recurse, base_transformations))
        named_transformation = None
    else:
        # Plain strings reference named transformations ("t_...").
        named_transformation = ".".join(base_transformations)
        base_transformations = []

    effect = options.pop("effect", None)
    if isinstance(effect, list):
        effect = ":".join([str(x) for x in effect])
    elif isinstance(effect, dict):
        # A single {name: value} pair becomes "name:value".
        effect = ":".join([str(x) for x in list(effect.items())[0]])

    border = options.pop("border", None)
    if isinstance(border, dict):
        border_color = border.get("color", "black").replace("#", "rgb:")
        border = "%(width)spx_solid_%(color)s" % {"color": border_color,
                                                  "width": str(border.get("width", 2))}

    flags = ".".join(build_array(options.pop("flags", None)))
    dpr = options.pop("dpr", cloudinary.config().dpr)
    duration = norm_range_value(options.pop("duration", None))
    start_offset = norm_range_value(options.pop("start_offset", None))
    end_offset = norm_range_value(options.pop("end_offset", None))
    offset = split_range(options.pop("offset", None))
    if offset:
        # "offset" is shorthand for start_offset..end_offset.
        start_offset = norm_range_value(offset[0])
        end_offset = norm_range_value(offset[1])

    video_codec = process_video_codec_param(options.pop("video_codec", None))

    aspect_ratio = options.pop("aspect_ratio", None)
    if isinstance(aspect_ratio, Fraction):
        aspect_ratio = str(aspect_ratio.numerator) + ":" + str(aspect_ratio.denominator)

    overlay = process_layer(options.pop("overlay", None), "overlay")
    underlay = process_layer(options.pop("underlay", None), "underlay")
    if_value = process_conditional(options.pop("if", None))

    # URL abbreviations for options that required preprocessing above.
    params = {
        "a": normalize_expression(angle),
        "ar": normalize_expression(aspect_ratio),
        "b": background,
        "bo": border,
        "c": crop,
        "co": color,
        "dpr": normalize_expression(dpr),
        "du": normalize_expression(duration),
        "e": normalize_expression(effect),
        "eo": normalize_expression(end_offset),
        "fl": flags,
        "h": normalize_expression(height),
        "l": overlay,
        "o": normalize_expression(options.pop('opacity', None)),
        "q": normalize_expression(options.pop('quality', None)),
        "r": normalize_expression(options.pop('radius', None)),
        "so": normalize_expression(start_offset),
        "t": named_transformation,
        "u": underlay,
        "w": normalize_expression(width),
        "x": normalize_expression(options.pop('x', None)),
        "y": normalize_expression(options.pop('y', None)),
        "vc": video_codec,
        "z": normalize_expression(options.pop('zoom', None))
    }
    # Options that map 1:1 to their URL abbreviation with no processing.
    simple_params = {
        "ac": "audio_codec",
        "af": "audio_frequency",
        "br": "bit_rate",
        "cs": "color_space",
        "d": "default_image",
        "dl": "delay",
        "dn": "density",
        "f": "fetch_format",
        "g": "gravity",
        "ki": "keyframe_interval",
        "p": "prefix",
        "pg": "page",
        "sp": "streaming_profile",
        "vs": "video_sampling",
    }

    for param, option in simple_params.items():
        params[param] = options.pop(option, None)

    # User-defined variables: "$name" keys in options plus the explicit
    # "variables" list (the latter preserves its given order).
    variables = options.pop('variables', {})
    var_params = []
    for key, value in options.items():
        if re.match(r'^\$', key):
            var_params.append(u"{0}_{1}".format(key, normalize_expression(str(value))))

    var_params.sort()

    if variables:
        for var in variables:
            var_params.append(u"{0}_{1}".format(var[0], normalize_expression(str(var[1]))))

    variables = ','.join(var_params)

    # Keep params whose value is truthy or exactly 0 (zero is meaningful).
    sorted_params = sorted([param + "_" + str(value) for param, value in params.items() if (value or value == 0)])
    if variables:
        # Variable definitions must precede the parameters that use them.
        sorted_params.insert(0, str(variables))

    if if_value is not None:
        # The condition must come first in the component.
        sorted_params.insert(0, "if_" + str(if_value))
    transformation = ",".join(sorted_params)
    if "raw_transformation" in options:
        transformation = transformation + "," + options.pop("raw_transformation")
    transformations = base_transformations + [transformation]
    if responsive_width:
        responsive_width_transformation = cloudinary.config().responsive_width_transformation \
            or DEFAULT_RESPONSIVE_WIDTH_TRANSFORMATION
        transformations += [generate_transformation_string(**responsive_width_transformation)[0]]
    url = "/".join([trans for trans in transformations if trans])

    # Hint flags consumed by the HTML tag helpers.
    if str(width).startswith("auto") or responsive_width:
        options["responsive"] = True
    if dpr == "auto":
        options["hidpi"] = True
    return url, options
|
||||
|
||||
|
||||
def is_fraction(width):
    """True when *width* parses as a float strictly below 1 (a relative size)."""
    text = str(width)
    match = re.match(FLOAT_RE, text)
    return match and float(text) < 1
|
||||
|
||||
|
||||
def split_range(range):
    """Split a range spec (a pair/sequence or an "a..b" string) into
    ``[start, end]``; return None for anything unrecognized."""
    if isinstance(range, (list, tuple)) and len(range) >= 2:
        return [range[0], range[-1]]
    if isinstance(range, string_types) and re.match(RANGE_RE, range):
        return range.split("..", 1)
    return None
|
||||
|
||||
|
||||
def norm_range_value(value):
    """Normalize one range endpoint; percent values get a 'p' suffix.

    Returns None for None input or for values that do not match the
    expected range-value pattern.
    """
    if value is None:
        return None

    match = re.match(RANGE_VALUE_RE, str(value))
    if match is None:
        return None

    suffix = 'p' if match.group('modifier') is not None else ''
    return match.group('value') + suffix
|
||||
|
||||
|
||||
def process_video_codec_param(param):
    """Serialize a video_codec option to 'codec[:profile[:level]]'.

    Non-dict values (plain codec strings, None) pass through unchanged.
    Note: 'level' is only emitted when 'profile' is also present.
    """
    if not isinstance(param, dict):
        return param
    parts = [param['codec']]
    if 'profile' in param:
        parts.append(param['profile'])
        if 'level' in param:
            parts.append(param['level'])
    return ':'.join(parts)
|
||||
|
||||
|
||||
def cleanup_params(params):
    """Drop None/empty-string params and map booleans to "1"/"0"."""
    return {k: __safe_value(v) for k, v in params.items() if v is not None and not v == ""}
|
||||
|
||||
|
||||
def sign_request(params, options):
    """Clean *params*, add their API signature and the api_key, and return them.

    :raises ValueError: when api_key or api_secret is missing from both
        *options* and the global configuration.
    """
    config = cloudinary.config()
    api_key = options.get("api_key", config.api_key)
    if not api_key:
        raise ValueError("Must supply api_key")
    api_secret = options.get("api_secret", config.api_secret)
    if not api_secret:
        raise ValueError("Must supply api_secret")

    signed = cleanup_params(params)
    # Sign the cleaned params first, then attach signature and key.
    signed["signature"] = api_sign_request(signed, api_secret)
    signed["api_key"] = api_key

    return signed
|
||||
|
||||
|
||||
def api_sign_request(params_to_sign, api_secret):
    """SHA1-hex signature over the sorted, '&'-joined truthy parameters."""
    serialized = []
    for key, value in params_to_sign.items():
        if not value:
            continue
        # Lists are serialized comma-separated; everything else via str().
        text = ",".join(value) if isinstance(value, list) else str(value)
        serialized.append(key + "=" + text)
    to_sign = "&".join(sorted(serialized))
    return hashlib.sha1(to_bytes(to_sign + api_secret)).hexdigest()
|
||||
|
||||
|
||||
def breakpoint_settings_mapper(breakpoint_settings):
    """Return a deep copy of the settings with any 'transformation' dict
    serialized to its URL-string form."""
    settings = copy.deepcopy(breakpoint_settings)
    transformation = settings.get("transformation")
    if transformation is not None:
        settings["transformation"], _ = generate_transformation_string(**transformation)
    return settings
|
||||
|
||||
|
||||
def generate_responsive_breakpoints_string(breakpoints):
    """JSON-encode breakpoint settings (scalar or list); None passes through."""
    if breakpoints is None:
        return None
    mapped = [breakpoint_settings_mapper(bp) for bp in build_array(breakpoints)]
    return json.dumps(mapped)
|
||||
|
||||
|
||||
def finalize_source(source, format, url_suffix):
    """Normalize *source* and apply url_suffix/format as appropriate.

    Returns ``(source, source_to_sign)``: the url_suffix is excluded from
    the signed variant, while the format extension is part of both.

    :raises ValueError: when url_suffix contains '.' or '/'.
    """
    # Collapse duplicate slashes (but keep the '//' right after a scheme).
    source = re.sub(r'([^:])/+', r'\1/', source)
    if re.match(r'^https?:/', source):
        # Remote (fetch) URL: escape as-is; suffix/format do not apply.
        source = smart_escape(source)
        source_to_sign = source
    else:
        # Unquote first to avoid double-escaping already-quoted IDs.
        source = unquote(source)
        if not PY3: source = source.encode('utf8')
        source = smart_escape(source)
        source_to_sign = source
        if url_suffix is not None:
            if re.search(r'[\./]', url_suffix): raise ValueError("url_suffix should not include . or /")
            source = source + "/" + url_suffix
        if format is not None:
            source = source + "." + format
            source_to_sign = source_to_sign + "." + format

    return source, source_to_sign
|
||||
|
||||
|
||||
def finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten):
    """Map (resource_type, type) to their URL path form, honoring the
    url_suffix, use_root_path and shorten delivery options.

    :raises ValueError: when a suffix or root path is requested for an
        unsupported resource/type combination.
    """
    upload_type = type or "upload"

    if url_suffix is not None:
        # SEO suffixes are only valid for image/upload and raw/upload,
        # which use the dedicated "images"/"files" path forms.
        if (resource_type, upload_type) == ("image", "upload"):
            resource_type, upload_type = "images", None
        elif (resource_type, upload_type) == ("raw", "upload"):
            resource_type, upload_type = "files", None
        else:
            raise ValueError("URL Suffix only supported for image/upload and raw/upload")

    if use_root_path:
        # Root path drops the resource/type segments entirely.
        if (resource_type, upload_type) in (("image", "upload"), ("images", None)):
            resource_type, upload_type = None, None
        else:
            raise ValueError("Root path only supported for image/upload")

    if shorten and (resource_type, upload_type) == ("image", "upload"):
        # "iu" is the shortened form of image/upload.
        resource_type, upload_type = "iu", None

    return resource_type, upload_type
|
||||
|
||||
|
||||
def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname, secure,
                                 secure_distribution):
    """Build the scheme-and-host prefix of a delivery URL.

    cdn_subdomain and secure_cdn_subdomain
    1) Customers in shared distribution (e.g. res.cloudinary.com)
       if cdn_domain is true uses res-[1-5].cloudinary.com for both http and https.
       Setting secure_cdn_subdomain to false disables this for https.
    2) Customers with private cdn
       if cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for http
       if secure_cdn_domain is true uses cloudname-res-[1-5].cloudinary.com for https
       (please contact support if you require this)
    3) Customers with cname
       if cdn_domain is true uses a[1-5].cname for http. For https, uses the same
       naming scheme as 1 for shared distribution and as 2 for private distribution.
    """
    shared_domain = not private_cdn
    # Shard index "1".."5" derived from the CRC of the source, so a given
    # asset always resolves to the same subdomain.
    shard = __crc(source)
    if secure:
        if secure_distribution is None or secure_distribution == cloudinary.OLD_AKAMAI_SHARED_CDN:
            secure_distribution = cloud_name + "-res.cloudinary.com" if private_cdn else cloudinary.SHARED_CDN

        shared_domain = shared_domain or secure_distribution == cloudinary.SHARED_CDN
        if secure_cdn_subdomain is None and shared_domain:
            # Default the https subdomain behaviour to the http setting.
            secure_cdn_subdomain = cdn_subdomain

        if secure_cdn_subdomain:
            secure_distribution = re.sub('res.cloudinary.com', "res-" + shard + ".cloudinary.com", secure_distribution)

        prefix = "https://" + secure_distribution
    elif cname:
        subdomain = "a" + shard + "." if cdn_subdomain else ""
        prefix = "http://" + subdomain + cname
    else:
        subdomain = cloud_name + "-res" if private_cdn else "res"
        if cdn_subdomain: subdomain = subdomain + "-" + shard
        prefix = "http://" + subdomain + ".cloudinary.com"

    # Shared distributions carry the cloud name as the first path segment.
    if shared_domain: prefix += "/" + cloud_name

    return prefix
|
||||
|
||||
|
||||
def merge(*dict_args):
    """Shallow-merge dicts left to right, skipping None entries.

    Returns None when every argument is None (or none are given); the
    first non-None dict is copied, so inputs are never mutated.
    """
    result = None
    for dictionary in dict_args:
        if dictionary is None:
            continue
        if result is None:
            result = dictionary.copy()
        else:
            result.update(dictionary)
    return result
|
||||
|
||||
|
||||
def cloudinary_url(source, **options):
    """Build a delivery URL for *source* and return ``(url, remaining_options)``.

    All URL-related options are popped; what remains is intended for HTML
    tag attributes. Remote http(s) sources of type "upload" (and empty
    sources) are returned unchanged.
    """
    original_source = source

    type = options.pop("type", "upload")
    if type == 'fetch':
        # For fetched resources the requested format is applied via the
        # fetch_format transformation, not a file extension.
        options["fetch_format"] = options.get("fetch_format", options.pop("format", None))
    transformation, options = generate_transformation_string(**options)

    resource_type = options.pop("resource_type", "image")
    version = options.pop("version", None)
    format = options.pop("format", None)
    cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain)
    secure_cdn_subdomain = options.pop("secure_cdn_subdomain", cloudinary.config().secure_cdn_subdomain)
    cname = options.pop("cname", cloudinary.config().cname)
    shorten = options.pop("shorten", cloudinary.config().shorten)

    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name or None)
    if cloud_name is None:
        raise ValueError("Must supply cloud_name in tag or in configuration")
    secure = options.pop("secure", cloudinary.config().secure)
    private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn)
    secure_distribution = options.pop("secure_distribution", cloudinary.config().secure_distribution)
    sign_url = options.pop("sign_url", cloudinary.config().sign_url)
    api_secret = options.pop("api_secret", cloudinary.config().api_secret)
    url_suffix = options.pop("url_suffix", None)
    use_root_path = options.pop("use_root_path", cloudinary.config().use_root_path)
    auth_token = options.pop("auth_token", None)
    if auth_token is not False:
        # auth_token=False explicitly disables token generation; otherwise
        # merge per-call settings over the globally configured ones.
        auth_token = merge(cloudinary.config().auth_token, auth_token)

    if (not source) or type == "upload" and re.match(r'^https?:', source):
        # Nothing to build: empty source, or an already-remote upload URL.
        return original_source, options

    resource_type, type = finalize_resource_type(resource_type, type, url_suffix, use_root_path, shorten)
    source, source_to_sign = finalize_source(source, format, url_suffix)

    # Default the version to "1" for public IDs inside folders, unless the
    # source already carries an explicit version prefix.
    if source_to_sign.find("/") >= 0 \
            and not re.match(r'^https?:/', source_to_sign) \
            and not re.match(r'^v[0-9]+', source_to_sign) \
            and not version:
        version = "1"
    if version: version = "v" + str(version)

    transformation = re.sub(r'([^:])/+', r'\1/', transformation)

    signature = None
    if sign_url and not auth_token:
        # Simple URL signature: first 8 chars of the url-safe base64 SHA1.
        to_sign = "/".join(__compact([transformation, source_to_sign]))
        signature = "s--" + to_string(
            base64.urlsafe_b64encode(hashlib.sha1(to_bytes(to_sign + api_secret)).digest())[0:8]) + "--"

    prefix = unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain, cname,
                                          secure, secure_distribution)
    source = "/".join(__compact([prefix, resource_type, type, signature, transformation, version, source]))
    if sign_url and auth_token:
        # Token-based authentication: append the generated token as a query.
        path = urlparse(source).path
        token = cloudinary.auth_token.generate( **merge(auth_token, {"url": path}))
        source = "%s?%s" % (source, token)
    return source, options
|
||||
|
||||
|
||||
def cloudinary_api_url(action='upload', **options):
    """Build ``<prefix>/v1_1/<cloud_name>/<resource_type>/<action>``.

    :raises ValueError: when no cloud_name is available.
    """
    prefix = options.get("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
    cloud_name = options.get("cloud_name", cloudinary.config().cloud_name)
    if not cloud_name:
        raise ValueError("Must supply cloud_name")
    resource_type = options.get("resource_type", "image")
    return "/".join([prefix, "v1_1", cloud_name, resource_type, action])
|
||||
|
||||
|
||||
# Based on ruby's CGI::unescape. In addition does not escape / :
def smart_escape(source, unsafe=r"([^a-zA-Z0-9_.\-\/:]+)"):
    """Percent-encode every run of characters matching *unsafe*.

    Unlike a full URL-quote, '/' and ':' are left intact by default.
    """
    def pack(m):
        raw = m.group(1)
        octets = struct.unpack('B' * len(raw), raw)
        return to_bytes('%' + "%".join("%02X" % octet for octet in octets).upper())
    return to_string(re.sub(to_bytes(unsafe), pack, to_bytes(source)))
|
||||
|
||||
|
||||
def random_public_id():
    """16 cryptographically-random characters from [a-z0-9]."""
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(16))
|
||||
|
||||
|
||||
def signed_preloaded_image(result):
    """Build '<resource_type>/upload/v<version>/<public_id>.<format>#<signature>'
    from an upload API *result* (a falsy format is omitted)."""
    name_parts = [part for part in (result["public_id"], result["format"]) if part]
    filename = ".".join(name_parts)
    path = "/".join([result["resource_type"], "upload", "v" + str(result["version"]), filename])
    return path + "#" + result["signature"]
|
||||
|
||||
|
||||
def now():
    """Current Unix time, truncated to whole seconds, as a string."""
    seconds = int(time.time())
    return str(seconds)
|
||||
|
||||
|
||||
def private_download_url(public_id, format, **options):
    """Build a signed URL for downloading a private original asset."""
    params = {
        "timestamp": now(),
        "public_id": public_id,
        "format": format,
    }
    # Optional pass-through parameters.
    for key in ("type", "attachment", "expires_at"):
        params[key] = options.get(key)
    signed = sign_request(params, options)

    return cloudinary_api_url("download", **options) + "?" + urlencode(signed)
|
||||
|
||||
|
||||
def zip_download_url(tag, **options):
    """Build a signed URL that downloads a zip of all resources tagged *tag*."""
    params = {
        "timestamp": now(),
        "tag": tag,
        "transformation": generate_transformation_string(**options)[0],
    }
    signed = sign_request(params, options)

    return cloudinary_api_url("download_tag.zip", **options) + "?" + urlencode(signed)
|
||||
|
||||
|
||||
def bracketize_seq(params):
    """Append '[]' to the names of list-valued params (Rails-style array keys)."""
    return {
        name + "[]" if isinstance(value, list) else name: value
        for name, value in params.items()
    }
|
||||
|
||||
|
||||
def download_archive_url(**options):
    """Build a signed URL that generates and downloads an archive of resources."""
    # Force download mode without mutating the caller's options.
    params = dict(options, mode="download")
    signed = sign_request(archive_params(**params), options)
    query = urlencode(bracketize_seq(signed), True)
    return cloudinary_api_url("generate_archive", **options) + "?" + query
|
||||
|
||||
|
||||
def download_zip_url(**options):
    """Like download_archive_url, but forcing a zip target format."""
    return download_archive_url(**dict(options, target_format="zip"))
|
||||
|
||||
def generate_auth_token(**options):
    """Generate an auth token from global auth_token config overridden by *options*."""
    return auth_token.generate(**merge(cloudinary.config().auth_token, options))
|
||||
|
||||
def archive_params(**options):
    """Translate archive-generation options into API request parameters.

    The timestamp defaults to "now" when not supplied; list-valued options
    are normalized through build_array and transformations are serialized
    via build_eager.
    """
    if options.get("timestamp") is None:
        timestamp = now()
    else:
        timestamp = options.get("timestamp")
    params = {
        "allow_missing": options.get("allow_missing"),
        "async": options.get("async"),
        "expires_at": options.get("expires_at"),
        "flatten_folders": options.get("flatten_folders"),
        "flatten_transformations": options.get("flatten_transformations"),
        "keep_derived": options.get("keep_derived"),
        "mode": options.get("mode"),
        "notification_url": options.get("notification_url"),
        "phash": options.get("phash"),
        # The "and" keeps None/falsy values untouched so absent options
        # are later dropped instead of becoming empty arrays.
        "prefixes": options.get("prefixes") and build_array(options.get("prefixes")),
        "public_ids": options.get("public_ids") and build_array(options.get("public_ids")),
        "skip_transformation_name": options.get("skip_transformation_name"),
        "tags": options.get("tags") and build_array(options.get("tags")),
        "target_format": options.get("target_format"),
        "target_public_id": options.get("target_public_id"),
        "target_tags": options.get("target_tags") and build_array(options.get("target_tags")),
        "timestamp": timestamp,
        "transformations": build_eager(options.get("transformations")),
        "type": options.get("type"),
        "use_original_filename": options.get("use_original_filename"),
    }
    return params
|
||||
|
||||
|
||||
def build_eager(transformations):
    """Serialize eager transformations to the '|'-delimited API format.

    Each entry may be a ready-made string or a transformation dict (whose
    optional "format" becomes a path component); None passes through.
    """
    if transformations is None:
        return None
    serialized = []
    for tr in build_array(transformations):
        if isinstance(tr, string_types):
            serialized.append(tr)
        else:
            ext = tr.get("format")
            pieces = [generate_transformation_string(**tr)[0], ext]
            serialized.append("/".join(piece for piece in pieces if piece))
    return "|".join(serialized)
|
||||
|
||||
|
||||
def build_custom_headers(headers):
    """Normalize custom headers to one newline-joined string.

    None stays None, dicts become "key: value" lines, lists are joined
    as-is, and any other value is returned unchanged.
    """
    if headers is None:
        return None
    if isinstance(headers, dict):
        headers = [key + ": " + value for key, value in headers.items()]
    elif not isinstance(headers, list):
        return headers
    return "\n".join(headers)
|
||||
|
||||
|
||||
def build_upload_params(**options):
    """Translate user-facing upload options into Upload API parameters.

    Non-scalar options (transformation, headers, eager, arrays, context,
    breakpoints, access_control) are serialized to the wire format the API
    expects; everything else is passed through as-is. A fresh timestamp is
    always included.
    """
    params = {"timestamp": now(),
              "transformation": generate_transformation_string(**options)[0],
              "public_id": options.get("public_id"),
              "callback": options.get("callback"),
              "format": options.get("format"),
              "type": options.get("type"),
              "backup": options.get("backup"),
              "faces": options.get("faces"),
              "image_metadata": options.get("image_metadata"),
              "exif": options.get("exif"),
              "colors": options.get("colors"),
              "headers": build_custom_headers(options.get("headers")),
              "eager": build_eager(options.get("eager")),
              "use_filename": options.get("use_filename"),
              "unique_filename": options.get("unique_filename"),
              "discard_original_filename": options.get("discard_original_filename"),
              "invalidate": options.get("invalidate"),
              "notification_url": options.get("notification_url"),
              "eager_notification_url": options.get("eager_notification_url"),
              "eager_async": options.get("eager_async"),
              "proxy": options.get("proxy"),
              "folder": options.get("folder"),
              "overwrite": options.get("overwrite"),
              # Array-valued options are joined to comma-separated strings;
              # the "and" leaves absent options as None.
              "tags": options.get("tags") and ",".join(build_array(options["tags"])),
              "allowed_formats": options.get("allowed_formats") and ",".join(build_array(options["allowed_formats"])),
              "face_coordinates": encode_double_array(options.get("face_coordinates")),
              "custom_coordinates": encode_double_array(options.get("custom_coordinates")),
              "context": encode_context(options.get("context")),
              "moderation": options.get("moderation"),
              "raw_convert": options.get("raw_convert"),
              "quality_override": options.get("quality_override"),
              "ocr": options.get("ocr"),
              "categorization": options.get("categorization"),
              "detection": options.get("detection"),
              "similarity_search": options.get("similarity_search"),
              "background_removal": options.get("background_removal"),
              "upload_preset": options.get("upload_preset"),
              "phash": options.get("phash"),
              "return_delete_token": options.get("return_delete_token"),
              "auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")),
              "responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")),
              "async": options.get("async"),
              "access_control": options.get("access_control") and json_encode(build_list_of_dicts(options.get("access_control")))}
    return params
|
||||
|
||||
|
||||
def __process_text_options(layer, layer_parameter):
    """Serialize a text layer's styling to 'family_size[_keyword...]'.

    Returns None when no text styling is present at all.

    :raises ValueError: when keywords/spacing are given without both
        font_family and font_size.
    """
    font_family = layer.get("font_family")
    font_size = layer.get("font_size")

    keywords = []
    for attr, default_value in __LAYER_KEYWORD_PARAMS:
        attr_value = layer.get(attr)
        # Only explicitly set, non-default keywords are serialized.
        if attr_value != default_value and attr_value is not None:
            keywords.append(attr_value)

    for spacing_attr in ("letter_spacing", "line_spacing"):
        spacing = layer.get(spacing_attr)
        if spacing is not None:
            keywords.append("{0}_{1}".format(spacing_attr, spacing))

    if font_size is None and font_family is None and not keywords:
        return None

    if font_family is None:
        raise ValueError("Must supply font_family for text in " + layer_parameter)
    if font_size is None:
        raise ValueError("Must supply font_size for text in " + layer_parameter)

    return '_'.join(str(k) for k in [font_family, font_size] + keywords)
|
||||
|
||||
|
||||
def process_layer(layer, layer_parameter):
    """Serialize an overlay/underlay option into its URL component form.

    Accepts a ready-made string (returned as-is unless it is a "fetch:..."
    shorthand) or a dict describing the layer; *layer_parameter* names the
    option ("overlay"/"underlay") for error messages.

    :raises ValueError: when required fields for the layer kind are missing.
    """
    if isinstance(layer, string_types) and layer.startswith("fetch:"):
        # "fetch:<url>" shorthand for a fetch layer dict.
        layer = {"url": layer[len('fetch:'):]}
    if not isinstance(layer, dict):
        return layer

    resource_type = layer.get("resource_type")
    text = layer.get("text")
    type = layer.get("type")
    public_id = layer.get("public_id")
    format = layer.get("format")
    fetch = layer.get("url")
    components = list()

    # Infer the resource type from the fields that were provided.
    if text is not None and resource_type is None:
        resource_type = "text"

    if fetch and resource_type is None:
        resource_type = "fetch"

    if public_id is not None and format is not None:
        public_id = public_id + "." + format

    if public_id is None and resource_type != "text" and resource_type != "fetch":
        raise ValueError("Must supply public_id for for non-text " + layer_parameter)

    # "image" and "upload" are the defaults and are omitted from the URL.
    if resource_type is not None and resource_type != "image":
        components.append(resource_type)

    if type is not None and type != "upload":
        components.append(type)

    if resource_type == "text" or resource_type == "subtitles":
        if public_id is None and text is None:
            raise ValueError("Must supply either text or public_id in " + layer_parameter)

        text_options = __process_text_options(layer, layer_parameter)

        if text_options is not None:
            components.append(text_options)

        if public_id is not None:
            # Folder separators become ':' inside layer components.
            public_id = public_id.replace("/", ':')
            components.append(public_id)

        if text is not None:
            var_pattern = VAR_NAME_RE
            match = re.findall(var_pattern,text)

            # Escape literal text but leave $variable references intact.
            parts= filter(lambda p: p is not None, re.split(var_pattern,text))
            encoded_text = []
            for part in parts:
                if re.match(var_pattern,part):
                    encoded_text.append(part)
                else:
                    # Double-escape ',' and '/' so they survive URL parsing.
                    encoded_text.append(smart_escape(smart_escape(part, r"([,/])")))

            text = ''.join(encoded_text)
            # text = text.replace("%2C", "%252C")
            # text = text.replace("/", "%252F")
            components.append(text)
    elif resource_type == "fetch":
        # Fetch layers embed the remote URL base64-encoded.
        b64 = base64_encode_url(fetch)
        components.append(b64)
    else:
        public_id = public_id.replace("/", ':')
        components.append(public_id)

    return ':'.join(components)
|
||||
|
||||
# Mapping from condition/arithmetic operators to the URL-safe names used in
# "if_" expressions (e.g. "w > 100" -> "w_gt_100").
IF_OPERATORS = {
    "=": 'eq',
    "!=": 'ne',
    "<": 'lt',
    ">": 'gt',
    "<=": 'lte',
    ">=": 'gte',
    "&&": 'and',
    "||": 'or',
    "*": 'mul',
    "/": 'div',
    "+": 'add',
    "-": 'sub'
}

# Mapping from predefined expression variable names to their short URL form.
PREDEFINED_VARS = {
    "aspect_ratio": "ar",
    "current_page": "cp",
    "face_count": "fc",
    "height": "h",
    "initial_aspect_ratio": "iar",
    "initial_height": "ih",
    "initial_width": "iw",
    "page_count": "pc",
    "page_x": "px",
    "page_y": "py",
    "tags": "tags",
    "width": "w"
}

# Pattern matching either an operator (when followed by a space/underscore)
# or any predefined variable name; used by normalize_expression together
# with the translate_if substitution callback.
replaceRE = "((\\|\\||>=|<=|&&|!=|>|=|<|/|-|\\+|\\*)(?=[ _])|" + '|'.join(PREDEFINED_VARS.keys())+ ")"
|
||||
|
||||
|
||||
def translate_if(match):
    """re.sub callback mapping an operator or variable token to its URL form.

    Unknown tokens pass through unchanged.
    """
    token = match.group(0)
    if token in IF_OPERATORS:
        return IF_OPERATORS[token]
    return PREDEFINED_VARS.get(token, token)
|
||||
|
||||
def process_conditional(conditional):
    """Normalize an "if" condition expression; None passes through."""
    if conditional is None:
        return conditional
    return normalize_expression(conditional)
|
||||
|
||||
def normalize_expression(expression):
    """Translate operators and predefined variable names in *expression*
    to their URL syntax, collapsing spaces/underscores to single '_'.

    Quoted strings ("!...!") and falsy values are returned untouched.
    """
    text = str(expression)
    if re.match(r'^!.+!$', text):
        # Explicitly quoted string literal: leave as-is.
        return expression
    if not expression:
        return expression
    translated = re.sub(replaceRE, translate_if, text)
    return re.sub('[ _]+', '_', translated)
|
||||
|
||||
def __join_pair(key, value):
    """Render one HTML attribute: None/'' -> None, True -> bare key,
    anything else -> key="value"."""
    if value is None or value == "":
        return None
    if value is True:
        return key
    return u'{0}="{1}"'.format(key, value)
|
||||
|
||||
|
||||
def html_attrs(attrs, only=None):
    """Serialize *attrs* into a sorted, space-separated HTML attribute string.

    :param attrs: dict of attribute name -> value (True renders a bare
        attribute; None/'' values are dropped)
    :param only: optional whitelist of attribute names to include
    :return: the serialized attribute string
    """
    pairs = (__join_pair(key, value) for key, value in attrs.items()
             if only is None or key in only)
    # __join_pair returns None for empty values; drop them before sorting,
    # since sorting None alongside strings raises TypeError on Python 3
    # (and the subsequent join would fail on Python 2).
    return ' '.join(sorted(pair for pair in pairs if pair is not None))
|
||||
|
||||
|
||||
def __safe_value(v):
    """Map booleans to "1"/"0" for API transport; pass everything else through."""
    if not isinstance(v, bool):
        return v
    return "1" if v else "0"
|
||||
|
||||
|
||||
def __crc(source):
    """Stable shard number '1'..'5' derived from the CRC32 of *source*."""
    checksum = zlib.crc32(to_bytearray(source)) & 0xffffffff
    return str(checksum % 5 + 1)
|
||||
|
||||
|
||||
def __compact(array):
    """Lazily filter out falsy entries (None, '', 0) from *array*."""
    # filter(None, ...) keeps exactly the truthy elements.
    return filter(None, array)
|
||||
|
||||
|
||||
def base64_encode_url(url):
    """
    Return the Base64-encoded version of *url*.

    The url is first unquoted (to avoid double-encoding an already-quoted
    value) and then re-escaped before being Base64 encoded.

    :param str url: the url to encode; the value is URI-decoded and then
        re-encoded before converting to its base64 representation
    :return: the Base64 representation as an ASCII string
    """
    try:
        url = unquote(url)
    except Exception:
        # Best effort: if the value cannot be unquoted, encode it as-is.
        # (Was a bare "except:", which would also swallow SystemExit and
        # KeyboardInterrupt.)
        pass
    url = smart_escape(url)
    b64 = base64.b64encode(url.encode('utf-8'))
    return b64.decode('ascii')
|
||||
|
||||
|
||||
def __json_serializer(obj):
    """JSON fallback serializer: ISO-format datetimes/dates, reject anything else."""
    if not isinstance(obj, (datetime, date)):
        raise TypeError("Object of type %s is not JSON serializable" % type(obj))
    return obj.isoformat()
|
@@ -1,59 +0,0 @@
|
||||
from math import floor
|
||||
|
||||
import time
|
||||
import sys
|
||||
import threading
|
||||
import functools
|
||||
|
||||
|
||||
def clamp(value):
    '''
    Clamp *value* to an integer invocation count in [1, sys.maxsize].

    There must be at least 1 method invocation made over the time
    period, and a fraction of an invocation makes no sense, so the
    value is floored and forced to be at least 1.

    :param float value: The number of method invocations.
    :return: Clamped number of invocations.
    :rtype: int
    '''
    floored = floor(value)
    if floored < 1:
        return 1
    return min(sys.maxsize, floored)
|
||||
|
||||
|
||||
class RateLimitDecorator:
    """Throttling decorator: allows at most ``period`` invocations every
    ``every`` seconds by sleeping before forwarding the wrapped call."""

    def __init__(self, period=1, every=1.0):
        # Minimum number of seconds that must separate two forwarded calls.
        self.frequency = abs(every) / float(clamp(period))
        # Timestamp of the most recent forwarded call (0.0 = never called).
        self.last_called = 0.0
        # Reentrant lock serializes the wait/update across threads.
        self.lock = threading.RLock()

    def __call__(self, func):
        '''
        Extend the behaviour of the following
        function, forwarding method invocations
        if the time window has elapsed.

        :param function func: The function to decorate.
        :return: Decorated function.
        :rtype: function
        '''
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            '''Decorator wrapper function'''
            with self.lock:
                elapsed = time.time() - self.last_called
                left_to_wait = self.frequency - elapsed
                if left_to_wait > 0:
                    # Too soon: sleep out the remainder of the window.
                    time.sleep(left_to_wait)
                self.last_called = time.time()
            # NOTE(review): the wrapped call is made outside the lock, so
            # throttled calls can still overlap in execution — confirm this
            # matches the intended concurrency semantics.
            return func(*args, **kwargs)
        return wrapper
|
||||
|
||||
|
||||
# Conventional lowercase alias so the class reads naturally as a decorator:
# @rate_limited(period, every)
rate_limited = RateLimitDecorator


# Explicit public API of this module.
__all__ = [
    'rate_limited'
]
|
@@ -1,9 +0,0 @@
|
||||
class Version(object):
    '''Immutable holder for the package version, exposing a read-only
    ``number`` attribute.'''

    def __init__(self, num):
        # Bypass our own __setattr__ guard by using object's implementation.
        super(Version, self).__setattr__('number', num)

    def __setattr__(self, *args):
        raise TypeError('cannot modify immutable instance')

    # Deleting attributes is forbidden for the same reason as setting them.
    __delattr__ = __setattr__
|
421
lib/six.py
421
lib/six.py
@@ -1,6 +1,4 @@
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2014 Benjamin Peterson
|
||||
# Copyright (c) 2010-2017 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -20,17 +18,24 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.6.1"
|
||||
__version__ = "1.11.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
@@ -53,6 +58,7 @@ else:
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
@@ -83,14 +89,14 @@ class _LazyDescr(object):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
try:
|
||||
result = self._resolve()
|
||||
except ImportError:
|
||||
# See the nice big comment in MovedModule.__getattr__.
|
||||
raise AttributeError("%s could not be imported " % self.name)
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(obj.__class__, self.name)
|
||||
# This is a bit ugly, but it avoids running this again by
|
||||
# removing this descriptor.
|
||||
delattr(obj.__class__, self.name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
@@ -109,22 +115,7 @@ class MovedModule(_LazyDescr):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
# It turns out many Python frameworks like to traverse sys.modules and
|
||||
# try to load various attributes. This causes problems if this is a
|
||||
# platform-specific module on the wrong platform, like _winreg on
|
||||
# Unixes. Therefore, we silently pretend unimportable modules do not
|
||||
# have any attributes. See issues #51, #53, #56, and #63 for the full
|
||||
# tales of woe.
|
||||
#
|
||||
# First, if possible, avoid loading the module just to look at __file__,
|
||||
# __name__, or __path__.
|
||||
if (attr in ("__file__", "__name__", "__path__") and
|
||||
self.mod not in sys.modules):
|
||||
raise AttributeError(attr)
|
||||
try:
|
||||
_module = self._resolve()
|
||||
except ImportError:
|
||||
raise AttributeError(attr)
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
@@ -170,9 +161,75 @@ class MovedAttribute(_LazyDescr):
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
@@ -180,28 +237,37 @@ _moved_attributes = [
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||
MovedAttribute("getoutput", "commands", "subprocess"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
@@ -233,21 +299,28 @@ _moved_attributes = [
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
]
|
||||
# Add windows specific modules.
|
||||
if sys.platform == "win32":
|
||||
_moved_attributes += [
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
sys.modules[__name__ + ".moves." + attr.name] = attr
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems(__name__ + ".moves")
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
@@ -266,8 +339,17 @@ _urllib_parse_moved_attributes = [
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
@@ -275,10 +357,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_parse"] = sys.modules[__name__ + ".moves.urllib.parse"] = Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse")
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
@@ -293,10 +377,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_error"] = sys.modules[__name__ + ".moves.urllib.error"] = Module_six_moves_urllib_error(__name__ + ".moves.urllib.error")
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
@@ -334,6 +420,8 @@ _urllib_request_moved_attributes = [
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
|
||||
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
@@ -341,10 +429,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_request"] = sys.modules[__name__ + ".moves.urllib.request"] = Module_six_moves_urllib_request(__name__ + ".moves.urllib.request")
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
@@ -360,10 +450,12 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_response"] = sys.modules[__name__ + ".moves.urllib.response"] = Module_six_moves_urllib_response(__name__ + ".moves.urllib.response")
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
@@ -376,22 +468,25 @@ del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib_robotparser"] = sys.modules[__name__ + ".moves.urllib.robotparser"] = Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser")
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
parse = sys.modules[__name__ + ".moves.urllib_parse"]
|
||||
error = sys.modules[__name__ + ".moves.urllib_error"]
|
||||
request = sys.modules[__name__ + ".moves.urllib_request"]
|
||||
response = sys.modules[__name__ + ".moves.urllib_response"]
|
||||
robotparser = sys.modules[__name__ + ".moves.urllib_robotparser"]
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
|
||||
sys.modules[__name__ + ".moves.urllib"] = Module_six_moves_urllib(__name__ + ".moves.urllib")
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
@@ -418,11 +513,6 @@ if PY3:
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
|
||||
_iterkeys = "keys"
|
||||
_itervalues = "values"
|
||||
_iteritems = "items"
|
||||
_iterlists = "lists"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
@@ -432,11 +522,6 @@ else:
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
_iterkeys = "iterkeys"
|
||||
_itervalues = "itervalues"
|
||||
_iteritems = "iteritems"
|
||||
_iterlists = "iterlists"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
@@ -459,6 +544,9 @@ if PY3:
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return func
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
@@ -467,6 +555,9 @@ else:
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return types.MethodType(func, None, cls)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
@@ -485,69 +576,124 @@ get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
def iterkeys(d, **kw):
|
||||
"""Return an iterator over the keys of a dictionary."""
|
||||
return iter(getattr(d, _iterkeys)(**kw))
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
"""Return an iterator over the values of a dictionary."""
|
||||
return iter(getattr(d, _itervalues)(**kw))
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iteritems)(**kw))
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
"""Return an iterator over the (key, [values]) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iterlists)(**kw))
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
import struct
|
||||
int2byte = struct.Struct(">B").pack
|
||||
del struct
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
_assertCountEqual = "assertCountEqual"
|
||||
if sys.version_info[1] <= 1:
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
else:
|
||||
_assertRaisesRegex = "assertRaisesRegex"
|
||||
_assertRegex = "assertRegex"
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
def iterbytes(buf):
|
||||
return (ord(byte) for byte in buf)
|
||||
iterbytes = functools.partial(itertools.imap, ord)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_assertCountEqual = "assertItemsEqual"
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
|
||||
return getattr(self, _assertCountEqual)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRaisesRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
try:
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
finally:
|
||||
value = None
|
||||
tb = None
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
@@ -562,12 +708,35 @@ else:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
try:
|
||||
raise tp, value, tb
|
||||
finally:
|
||||
tb = None
|
||||
""")
|
||||
|
||||
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
if from_value is None:
|
||||
raise value
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
elif sys.version_info[:2] > (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
else:
|
||||
def raise_from(value, from_value):
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
@@ -575,13 +744,14 @@ if print_ is None:
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
@@ -622,25 +792,100 @@ if print_ is None:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
if sys.version_info[:2] < (3, 3):
|
||||
_print = print_
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
fp = kwargs.get("file", sys.stdout)
|
||||
flush = kwargs.pop("flush", False)
|
||||
_print(*args, **kwargs)
|
||||
if flush and fp is not None:
|
||||
fp.flush()
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped, assigned, updated)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("NewBase", bases, {})
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(type):
|
||||
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
|
||||
@classmethod
|
||||
def __prepare__(cls, name, this_bases):
|
||||
return meta.__prepare__(name, bases)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
|
||||
"""
|
||||
A decorator that defines __unicode__ and __str__ methods under Python 2.
|
||||
Under Python 3 it does nothing.
|
||||
|
||||
To support Python 2 and 3 with a single code base, define a __str__ method
|
||||
returning text and apply this decorator to the class.
|
||||
"""
|
||||
if PY2:
|
||||
if '__str__' not in klass.__dict__:
|
||||
raise ValueError("@python_2_unicode_compatible cannot be applied "
|
||||
"to %s because it doesn't define __str__()." %
|
||||
klass.__name__)
|
||||
klass.__unicode__ = klass.__str__
|
||||
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
|
||||
return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
||||
|
@@ -243,7 +243,7 @@ def initialize(config_file):
|
||||
# Check for new versions
|
||||
if CONFIG.CHECK_GITHUB_ON_STARTUP and CONFIG.CHECK_GITHUB:
|
||||
try:
|
||||
LATEST_VERSION = versioncheck.checkGithub()
|
||||
LATEST_VERSION = versioncheck.check_github()
|
||||
except:
|
||||
logger.exception(u"Unhandled exception")
|
||||
LATEST_VERSION = CURRENT_VERSION
|
||||
@@ -378,8 +378,8 @@ def initialize_scheduler():
|
||||
# Update check
|
||||
github_minutes = CONFIG.CHECK_GITHUB_INTERVAL if CONFIG.CHECK_GITHUB_INTERVAL and CONFIG.CHECK_GITHUB else 0
|
||||
|
||||
schedule_job(versioncheck.checkGithub, 'Check GitHub for updates',
|
||||
hours=0, minutes=github_minutes, seconds=0, args=(bool(CONFIG.PLEXPY_AUTO_UPDATE),))
|
||||
schedule_job(versioncheck.check_github, 'Check GitHub for updates',
|
||||
hours=0, minutes=github_minutes, seconds=0, args=(bool(CONFIG.PLEXPY_AUTO_UPDATE), True))
|
||||
|
||||
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
|
||||
|
||||
@@ -637,7 +637,7 @@ def dbcheck():
|
||||
# newsletters table :: This table keeps record of the newsletter settings
|
||||
c_db.execute(
|
||||
'CREATE TABLE IF NOT EXISTS newsletters (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
'agent_id INTEGER, agent_name TEXT, agent_label TEXT, '
|
||||
'agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL DEFAULT "", '
|
||||
'friendly_name TEXT, newsletter_config TEXT, email_config TEXT, '
|
||||
'subject TEXT, body TEXT, message TEXT, '
|
||||
'cron TEXT NOT NULL DEFAULT "0 0 * * 0", active INTEGER DEFAULT 0)'
|
||||
@@ -648,7 +648,7 @@ def dbcheck():
|
||||
'CREATE TABLE IF NOT EXISTS newsletter_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
|
||||
'newsletter_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, '
|
||||
'subject_text TEXT, body_text TEXT, message_text TEXT, start_date TEXT, end_date TEXT, '
|
||||
'start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, success INTEGER DEFAULT 0)'
|
||||
'start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, filename TEXT, success INTEGER DEFAULT 0)'
|
||||
)
|
||||
|
||||
# recently_added table :: This table keeps record of recently added items
|
||||
@@ -693,6 +693,12 @@ def dbcheck():
|
||||
'img_hash TEXT, imgur_title TEXT, imgur_url TEXT, delete_hash TEXT)'
|
||||
)
|
||||
|
||||
# cloudinary_lookup table :: This table keeps record of the Cloudinary uploads
|
||||
c_db.execute(
|
||||
'CREATE TABLE IF NOT EXISTS cloudinary_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, '
|
||||
'img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)'
|
||||
)
|
||||
|
||||
# Upgrade sessions table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT started FROM sessions')
|
||||
@@ -1489,6 +1495,24 @@ def dbcheck():
|
||||
'ALTER TABLE newsletter_log ADD COLUMN end_time INTEGER'
|
||||
)
|
||||
|
||||
# Upgrade newsletter_log table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT filename FROM newsletter_log')
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug(u"Altering database. Updating database table newsletter_log.")
|
||||
c_db.execute(
|
||||
'ALTER TABLE newsletter_log ADD COLUMN filename TEXT'
|
||||
)
|
||||
|
||||
# Upgrade newsletters table from earlier versions
|
||||
try:
|
||||
c_db.execute('SELECT id_name FROM newsletters')
|
||||
except sqlite3.OperationalError:
|
||||
logger.debug(u"Altering database. Updating database table newsletters.")
|
||||
c_db.execute(
|
||||
'ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL DEFAULT ""'
|
||||
)
|
||||
|
||||
# Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id)
|
||||
try:
|
||||
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone()
|
||||
@@ -1687,9 +1711,10 @@ def dbcheck():
|
||||
|
||||
for row in result:
|
||||
img_hash = notification_handler.set_hash_image_info(
|
||||
rating_key=row['rating_key'], width=600, height=1000, fallback='poster')
|
||||
data_factory.set_imgur_info(img_hash=img_hash, imgur_title=row['poster_title'],
|
||||
imgur_url=row['poster_url'], delete_hash=row['delete_hash'])
|
||||
rating_key=row['rating_key'], width=1000, height=1500, fallback='poster')
|
||||
data_factory.set_img_info(img_hash=img_hash, imgur_title=row['poster_title'],
|
||||
imgur_url=row['poster_url'], delete_hash=row['delete_hash'],
|
||||
service='imgur')
|
||||
|
||||
db.action('DROP TABLE poster_urls')
|
||||
except sqlite3.OperationalError:
|
||||
|
@@ -530,7 +530,7 @@ General optional parameters:
|
||||
return data
|
||||
|
||||
def _api_responds(self, result_type='error', data=None, msg=''):
|
||||
""" Formats the result to a predefined dict so we can hange it the to
|
||||
""" Formats the result to a predefined dict so we can change it the to
|
||||
the desired output by _api_out_as """
|
||||
|
||||
if data is None:
|
||||
@@ -637,19 +637,19 @@ General optional parameters:
|
||||
except:
|
||||
pass
|
||||
|
||||
# Fallback if we cant "parse the reponse"
|
||||
# Fallback if we cant "parse the response"
|
||||
if ret is None:
|
||||
ret = result
|
||||
|
||||
if ret or self._api_result_type == 'success':
|
||||
if ret is not None or self._api_result_type == 'success':
|
||||
# To allow override for restart etc
|
||||
# if the call returns some data we are gonna assume its a success
|
||||
self._api_result_type = 'success'
|
||||
else:
|
||||
self._api_result_type = 'error'
|
||||
|
||||
# Since some of them metods use a api like response for the ui
|
||||
# {result: error, message: 'Some shit happend'}
|
||||
# Since some of them methods use a api like response for the ui
|
||||
# {result: error, message: 'Some shit happened'}
|
||||
if isinstance(ret, dict):
|
||||
if ret.get('message'):
|
||||
self._api_msg = ret.pop('message', None)
|
||||
|
@@ -519,13 +519,16 @@ NEWSLETTER_PARAMETERS = [
|
||||
'category': 'Global',
|
||||
'parameters': [
|
||||
{'name': 'Server Name', 'type': 'str', 'value': 'server_name', 'description': 'The name of your Plex Server.'},
|
||||
{'name': 'Start Date', 'type': 'str', 'value': 'start_date', 'description': 'The start date of the newesletter.'},
|
||||
{'name': 'End Date', 'type': 'str', 'value': 'end_date', 'description': 'The end date of the newesletter.'},
|
||||
{'name': 'Start Date', 'type': 'str', 'value': 'start_date', 'description': 'The start date of the newsletter.'},
|
||||
{'name': 'End Date', 'type': 'str', 'value': 'end_date', 'description': 'The end date of the newsletter.'},
|
||||
{'name': 'Week Number', 'type': 'int', 'value': 'week_number', 'description': 'The week number of the year.'},
|
||||
{'name': 'Newsletter Time Frame', 'type': 'int', 'value': 'newsletter_time_frame', 'description': 'The time frame included in the newsletter.'},
|
||||
{'name': 'Newsletter Time Frame Units', 'type': 'str', 'value': 'newsletter_time_frame_units', 'description': 'The time frame units included in the newsletter.'},
|
||||
{'name': 'Newsletter URL', 'type': 'str', 'value': 'newsletter_url', 'description': 'The self-hosted URL to the newsletter.'},
|
||||
{'name': 'Newsletter Static URL', 'type': 'str', 'value': 'newsletter_static_url', 'description': 'The static self-hosted URL to the latest scheduled newsletter for the agent.'},
|
||||
{'name': 'Newsletter UUID', 'type': 'str', 'value': 'newsletter_uuid', 'description': 'The unique identifier for the newsletter.'},
|
||||
{'name': 'Newsletter ID', 'type': 'int', 'value': 'newsletter_id', 'description': 'The unique ID number for the newsletter agent.'},
|
||||
{'name': 'Newsletter ID Name', 'type': 'int', 'value': 'newsletter_id_name', 'description': 'The unique ID name for the newsletter agent.'},
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@@ -115,6 +115,9 @@ _CONFIG_DEFINITIONS = {
|
||||
'CHECK_GITHUB_INTERVAL': (int, 'General', 360),
|
||||
'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1),
|
||||
'CLEANUP_FILES': (int, 'General', 0),
|
||||
'CLOUDINARY_CLOUD_NAME': (str, 'Cloudinary', ''),
|
||||
'CLOUDINARY_API_KEY': (str, 'Cloudinary', ''),
|
||||
'CLOUDINARY_API_SECRET': (str, 'Cloudinary', ''),
|
||||
'CONFIG_VERSION': (int, 'Advanced', 0),
|
||||
'DO_NOT_OVERRIDE_GIT_BRANCH': (int, 'General', 0),
|
||||
'EMAIL_ENABLED': (int, 'Email', 0),
|
||||
@@ -309,9 +312,11 @@ _CONFIG_DEFINITIONS = {
|
||||
'MONITOR_REMOTE_ACCESS': (int, 'Monitoring', 0),
|
||||
'MONITORING_INTERVAL': (int, 'Monitoring', 60),
|
||||
'MONITORING_USE_WEBSOCKET': (int, 'Monitoring', 0),
|
||||
'NEWSLETTER_CUSTOM_DIR': (str, 'Newsletter', ''),
|
||||
'NEWSLETTER_TEMPLATES': (str, 'Newsletter', 'newsletters'),
|
||||
'NEWSLETTER_DIR': (str, 'Newsletter', ''),
|
||||
'NEWSLETTER_SELF_HOSTED': (int, 'Newsletter', 0),
|
||||
'NEWSLETTER_STATIC_URL': (int, 'Newsletter', 0),
|
||||
'NMA_APIKEY': (str, 'NMA', ''),
|
||||
'NMA_ENABLED': (int, 'NMA', 0),
|
||||
'NMA_PRIORITY': (int, 'NMA', 0),
|
||||
|
@@ -1132,12 +1132,12 @@ class DataFactory(object):
|
||||
|
||||
return ip_address
|
||||
|
||||
def get_imgur_info(self, img=None, rating_key=None, width=None, height=None,
|
||||
opacity=None, background=None, blur=None, fallback=None,
|
||||
order_by=''):
|
||||
def get_img_info(self, img=None, rating_key=None, width=None, height=None,
|
||||
opacity=None, background=None, blur=None, fallback=None,
|
||||
order_by='', service=None):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
imgur_info = []
|
||||
img_info = []
|
||||
|
||||
where_params = []
|
||||
args = []
|
||||
@@ -1174,52 +1174,94 @@ class DataFactory(object):
|
||||
if order_by:
|
||||
order_by = 'ORDER BY ' + order_by + ' DESC'
|
||||
|
||||
query = 'SELECT imgur_title, imgur_url FROM imgur_lookup ' \
|
||||
'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \
|
||||
'%s %s' % (where, order_by)
|
||||
if service == 'imgur':
|
||||
query = 'SELECT imgur_title AS img_title, imgur_url AS img_url FROM imgur_lookup ' \
|
||||
'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \
|
||||
'%s %s' % (where, order_by)
|
||||
elif service == 'cloudinary':
|
||||
query = 'SELECT cloudinary_title AS img_title, cloudinary_url AS img_url FROM cloudinary_lookup ' \
|
||||
'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash ' \
|
||||
'%s %s' % (where, order_by)
|
||||
else:
|
||||
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_img_info: "
|
||||
"service not provided.")
|
||||
return img_info
|
||||
|
||||
try:
|
||||
imgur_info = monitor_db.select(query, args=args)
|
||||
img_info = monitor_db.select(query, args=args)
|
||||
except Exception as e:
|
||||
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_imgur_info: %s." % e)
|
||||
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_img_info: %s." % e)
|
||||
|
||||
return imgur_info
|
||||
return img_info
|
||||
|
||||
def set_imgur_info(self, img_hash=None, imgur_title=None, imgur_url=None, delete_hash=None):
|
||||
def set_img_info(self, img_hash=None, img_title=None, img_url=None, delete_hash=None, service=None):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
keys = {'img_hash': img_hash}
|
||||
values = {'imgur_title': imgur_title,
|
||||
'imgur_url': imgur_url,
|
||||
'delete_hash': delete_hash}
|
||||
|
||||
monitor_db.upsert('imgur_lookup', key_dict=keys, value_dict=values)
|
||||
if service == 'imgur':
|
||||
table = 'imgur_lookup'
|
||||
values = {'imgur_title': img_title,
|
||||
'imgur_url': img_url,
|
||||
'delete_hash': delete_hash}
|
||||
elif service == 'cloudinary':
|
||||
table = 'cloudinary_lookup'
|
||||
values = {'cloudinary_title': img_title,
|
||||
'cloudinary_url': img_url}
|
||||
else:
|
||||
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for set_img_info: "
|
||||
"service not provided.")
|
||||
return
|
||||
|
||||
def delete_imgur_info(self, rating_key=None):
|
||||
monitor_db.upsert(table, key_dict=keys, value_dict=values)
|
||||
|
||||
def delete_img_info(self, rating_key=None, service=None):
|
||||
monitor_db = database.MonitorDatabase()
|
||||
|
||||
if rating_key:
|
||||
query = 'SELECT imgur_title, delete_hash, fallback FROM imgur_lookup ' \
|
||||
'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \
|
||||
'WHERE rating_key = ? '
|
||||
args = [rating_key]
|
||||
results = monitor_db.select(query, args=args)
|
||||
service = service or helpers.get_img_service()
|
||||
|
||||
for imgur_info in results:
|
||||
if imgur_info['delete_hash']:
|
||||
helpers.delete_from_imgur(delete_hash=imgur_info['delete_hash'],
|
||||
img_title=imgur_info['imgur_title'],
|
||||
fallback=imgur_info['fallback'])
|
||||
if service == 'imgur':
|
||||
# Delete from Imgur
|
||||
query = 'SELECT imgur_title, delete_hash, fallback FROM imgur_lookup ' \
|
||||
'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \
|
||||
'WHERE rating_key = ? '
|
||||
args = [rating_key]
|
||||
results = monitor_db.select(query, args=args)
|
||||
|
||||
logger.info(u"Tautulli DataFactory :: Deleting Imgur info for rating_key %s from the database."
|
||||
% rating_key)
|
||||
result = monitor_db.action('DELETE FROM imgur_lookup WHERE img_hash '
|
||||
'IN (SELECT img_hash FROM image_hash_lookup WHERE rating_key = ?)',
|
||||
[rating_key])
|
||||
for imgur_info in results:
|
||||
if imgur_info['delete_hash']:
|
||||
helpers.delete_from_imgur(delete_hash=imgur_info['delete_hash'],
|
||||
img_title=imgur_info['imgur_title'],
|
||||
fallback=imgur_info['fallback'])
|
||||
|
||||
return True if result else False
|
||||
logger.info(u"Tautulli DataFactory :: Deleting Imgur info for rating_key %s from the database."
|
||||
% rating_key)
|
||||
result = monitor_db.action('DELETE FROM imgur_lookup WHERE img_hash '
|
||||
'IN (SELECT img_hash FROM image_hash_lookup WHERE rating_key = ?)',
|
||||
[rating_key])
|
||||
|
||||
def get_poster_info(self, rating_key='', metadata=None):
|
||||
elif service == 'cloudinary':
|
||||
# Delete from Cloudinary
|
||||
helpers.delete_from_cloudinary(rating_key=rating_key)
|
||||
|
||||
logger.info(u"Tautulli DataFactory :: Deleting Cloudinary info for rating_key %s from the database."
|
||||
% rating_key)
|
||||
result = monitor_db.action('DELETE FROM cloudinary_lookup WHERE img_hash '
|
||||
'IN (SELECT img_hash FROM image_hash_lookup WHERE rating_key = ?)',
|
||||
[rating_key])
|
||||
|
||||
else:
|
||||
logger.error(u"Tautulli DataFactory :: Unable to delete hosted images: invalid service '%s' provided."
|
||||
% service)
|
||||
|
||||
return service
|
||||
|
||||
else:
|
||||
logger.error(u"Tautulli DataFactory :: Unable to delete hosted images: rating_key not provided.")
|
||||
return False
|
||||
|
||||
def get_poster_info(self, rating_key='', metadata=None, service=None):
|
||||
poster_key = ''
|
||||
if str(rating_key).isdigit():
|
||||
poster_key = rating_key
|
||||
@@ -1234,10 +1276,17 @@ class DataFactory(object):
|
||||
poster_info = {}
|
||||
|
||||
if poster_key:
|
||||
imgur_info = self.get_imgur_info(rating_key=poster_key, order_by='height', fallback='poster')
|
||||
if imgur_info:
|
||||
poster_info = {'poster_title': imgur_info[0]['imgur_title'],
|
||||
'poster_url': imgur_info[0]['imgur_url']}
|
||||
service = service or helpers.get_img_service()
|
||||
|
||||
if service:
|
||||
img_info = self.get_img_info(rating_key=poster_key,
|
||||
order_by='height',
|
||||
fallback='poster',
|
||||
service=service)
|
||||
if img_info:
|
||||
poster_info = {'poster_title': img_info[0]['img_title'],
|
||||
'poster_url': img_info[0]['img_url'],
|
||||
'img_service': service.capitalize()}
|
||||
|
||||
return poster_info
|
||||
|
||||
|
@@ -14,6 +14,10 @@
|
||||
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import cloudinary
|
||||
from cloudinary.api import delete_resources_by_tag
|
||||
from cloudinary.uploader import upload
|
||||
from cloudinary.utils import cloudinary_url
|
||||
import datetime
|
||||
from functools import wraps
|
||||
import geoip2.database, geoip2.errors
|
||||
@@ -28,7 +32,6 @@ import math
|
||||
import maxminddb
|
||||
from operator import itemgetter
|
||||
import os
|
||||
from ratelimit import rate_limited
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
@@ -705,17 +708,26 @@ def anon_url(*url):
|
||||
return '' if None in url else '%s%s' % (plexpy.CONFIG.ANON_REDIRECT, ''.join(str(s) for s in url))
|
||||
|
||||
|
||||
@rate_limited(450, 3600)
|
||||
def get_img_service(include_self=False):
|
||||
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
|
||||
return 'imgur'
|
||||
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and include_self:
|
||||
return 'self-hosted'
|
||||
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 3:
|
||||
return 'cloudinary'
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
|
||||
""" Uploads an image to Imgur """
|
||||
client_id = plexpy.CONFIG.IMGUR_CLIENT_ID
|
||||
img_url = delete_hash = ''
|
||||
|
||||
if not client_id:
|
||||
logger.error(u"Tautulli Helpers :: Cannot upload poster to Imgur. No Imgur client id specified in the settings.")
|
||||
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
|
||||
logger.error(u"Tautulli Helpers :: Cannot upload image to Imgur. No Imgur client id specified in the settings.")
|
||||
return img_url, delete_hash
|
||||
|
||||
headers = {'Authorization': 'Client-ID %s' % client_id}
|
||||
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
|
||||
data = {'image': base64.b64encode(img_data),
|
||||
'title': img_title.encode('utf-8'),
|
||||
'name': str(rating_key) + '.png',
|
||||
@@ -743,9 +755,11 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
|
||||
|
||||
def delete_from_imgur(delete_hash, img_title='', fallback=''):
|
||||
""" Deletes an image from Imgur """
|
||||
client_id = plexpy.CONFIG.IMGUR_CLIENT_ID
|
||||
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
|
||||
logger.error(u"Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
|
||||
return False
|
||||
|
||||
headers = {'Authorization': 'Client-ID %s' % client_id}
|
||||
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
|
||||
|
||||
response, err_msg, req_msg = request.request_response2('https://api.imgur.com/3/image/%s' % delete_hash, 'DELETE',
|
||||
headers=headers)
|
||||
@@ -761,6 +775,90 @@ def delete_from_imgur(delete_hash, img_title='', fallback=''):
|
||||
return False
|
||||
|
||||
|
||||
def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
|
||||
""" Uploads an image to Cloudinary """
|
||||
img_url = ''
|
||||
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error(u"Tautulli Helpers :: Cannot upload image to Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return img_url
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
try:
|
||||
response = upload('data:image/png;base64,{}'.format(base64.b64encode(img_data)),
|
||||
public_id='{}_{}'.format(fallback, rating_key),
|
||||
tags=[fallback, str(rating_key)],
|
||||
context={'title': img_title.encode('utf-8'), 'rating_key': str(rating_key), 'fallback': fallback})
|
||||
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) uploaded to Cloudinary.".format(img_title, fallback))
|
||||
img_url = response.get('url', '')
|
||||
except Exception as e:
|
||||
logger.error(u"Tautulli Helpers :: Unable to upload image '{}' ({}) to Cloudinary: {}".format(img_title, fallback, e))
|
||||
|
||||
return img_url
|
||||
|
||||
|
||||
def delete_from_cloudinary(rating_key):
|
||||
""" Deletes an image from Cloudinary """
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error(u"Tautulli Helpers :: Cannot delete image from Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return False
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
delete_resources_by_tag(str(rating_key))
|
||||
|
||||
logger.debug(u"Tautulli Helpers :: Deleted images from Cloudinary with rating_key {}.".format(rating_key))
|
||||
return True
|
||||
|
||||
|
||||
def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100, background='000000', blur=0,
|
||||
img_format='png', img_title='', fallback=None):
|
||||
url = ''
|
||||
|
||||
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
|
||||
logger.error(u"Tautulli Helpers :: Cannot transform image on Cloudinary. Cloudinary settings not specified in the settings.")
|
||||
return url
|
||||
|
||||
cloudinary.config(
|
||||
cloud_name=plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
api_key=plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
api_secret=plexpy.CONFIG.CLOUDINARY_API_SECRET
|
||||
)
|
||||
|
||||
img_options = {'format': img_format,
|
||||
'version': int(time.time())}
|
||||
|
||||
if width != 1000:
|
||||
img_options['width'] = str(width)
|
||||
img_options['crop'] = 'fill'
|
||||
if height != 1500:
|
||||
img_options['height'] = str(height)
|
||||
img_options['crop'] = 'fill'
|
||||
if opacity != 100:
|
||||
img_options['opacity'] = opacity
|
||||
if background != '000000':
|
||||
img_options['background'] = 'rgb:{}'.format(background)
|
||||
if blur != 0:
|
||||
img_options['effect'] = 'blur:{}'.format(blur * 100)
|
||||
|
||||
try:
|
||||
url, options = cloudinary_url('{}_{}'.format(fallback, rating_key), **img_options)
|
||||
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) transformed on Cloudinary.".format(img_title, fallback))
|
||||
except Exception as e:
|
||||
logger.error(u"Tautulli Helpers :: Unable to transform image '{}' ({}) on Cloudinary: {}".format(img_title, fallback, e))
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def cache_image(url, image=None):
|
||||
"""
|
||||
Saves an image to the cache directory.
|
||||
@@ -972,7 +1070,10 @@ def get_plexpy_url(hostname=None):
|
||||
s.connect(('<broadcast>', 0))
|
||||
hostname = s.getsockname()[0]
|
||||
except socket.error:
|
||||
hostname = socket.gethostbyname(socket.gethostname())
|
||||
try:
|
||||
hostname = socket.gethostbyname(socket.gethostname())
|
||||
except socket.gaierror:
|
||||
pass
|
||||
|
||||
if not hostname:
|
||||
hostname = 'localhost'
|
||||
|
@@ -86,7 +86,9 @@ def notify(newsletter_id=None, notify_action=None, **kwargs):
|
||||
body = newsletter_config['body']
|
||||
message = newsletter_config['message']
|
||||
|
||||
newsletter_agent = newsletters.get_agent_class(agent_id=newsletter_config['agent_id'],
|
||||
newsletter_agent = newsletters.get_agent_class(newsletter_id=newsletter_id,
|
||||
newsletter_id_name=newsletter_config['id_name'],
|
||||
agent_id=newsletter_config['agent_id'],
|
||||
config=newsletter_config['config'],
|
||||
email_config=newsletter_config['email_config'],
|
||||
subject=subject,
|
||||
@@ -100,6 +102,7 @@ def notify(newsletter_id=None, notify_action=None, **kwargs):
|
||||
subject=newsletter_agent.subject_formatted,
|
||||
body=newsletter_agent.body_formatted,
|
||||
message=newsletter_agent.message_formatted,
|
||||
filename=newsletter_agent.filename_formatted,
|
||||
start_date=newsletter_agent.start_date.format('YYYY-MM-DD'),
|
||||
end_date=newsletter_agent.end_date.format('YYYY-MM-DD'),
|
||||
start_time=newsletter_agent.start_time,
|
||||
@@ -114,7 +117,7 @@ def notify(newsletter_id=None, notify_action=None, **kwargs):
|
||||
return True
|
||||
|
||||
|
||||
def set_notify_state(newsletter, notify_action, subject, body, message,
|
||||
def set_notify_state(newsletter, notify_action, subject, body, message, filename,
|
||||
start_date, end_date, start_time, end_time, newsletter_uuid):
|
||||
|
||||
if newsletter and notify_action:
|
||||
@@ -133,7 +136,8 @@ def set_notify_state(newsletter, notify_action, subject, body, message,
|
||||
'start_date': start_date,
|
||||
'end_date': end_date,
|
||||
'start_time': start_time,
|
||||
'end_time': end_time}
|
||||
'end_time': end_time,
|
||||
'filename': filename}
|
||||
|
||||
db.upsert(table_name='newsletter_log', key_dict=keys, value_dict=values)
|
||||
return db.last_insert_id()
|
||||
@@ -149,20 +153,29 @@ def set_notify_success(newsletter_log_id):
|
||||
db.upsert(table_name='newsletter_log', key_dict=keys, value_dict=values)
|
||||
|
||||
|
||||
def get_newsletter(newsletter_uuid):
|
||||
def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
|
||||
db = database.MonitorDatabase()
|
||||
result = db.select_single('SELECT newsletter_id, start_date, end_date FROM newsletter_log '
|
||||
'WHERE uuid = ?', [newsletter_uuid])
|
||||
|
||||
if newsletter_uuid:
|
||||
result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log '
|
||||
'WHERE uuid = ?', [newsletter_uuid])
|
||||
elif newsletter_id_name:
|
||||
result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log '
|
||||
'JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id '
|
||||
'WHERE id_name = ? AND notify_action != "test" '
|
||||
'ORDER BY timestamp DESC LIMIT 1', [newsletter_id_name])
|
||||
else:
|
||||
result = None
|
||||
|
||||
if result:
|
||||
newsletter_id = result['newsletter_id']
|
||||
newsletter_uuid = result['uuid']
|
||||
start_date = result['start_date']
|
||||
end_date = result['end_date']
|
||||
newsletter_file = result['filename'] or 'newsletter_%s-%s_%s.html' % (start_date.replace('-', ''),
|
||||
end_date.replace('-', ''),
|
||||
newsletter_uuid)
|
||||
|
||||
newsletter_file = 'newsletter_%s-%s_%s.html' % (start_date.replace('-', ''),
|
||||
end_date.replace('-', ''),
|
||||
newsletter_uuid)
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
|
||||
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
|
||||
|
||||
if newsletter_file in os.listdir(newsletter_folder):
|
||||
@@ -173,4 +186,4 @@ def get_newsletter(newsletter_uuid):
|
||||
except OSError as e:
|
||||
logger.error(u"Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
|
||||
else:
|
||||
logger.warn(u"Tautulli NewsletterHandler :: Newsletter '%s' file is missing." % newsletter_uuid)
|
||||
logger.warn(u"Tautulli NewsletterHandler :: Newsletter file '%s' is missing." % newsletter_file)
|
||||
|
@@ -63,12 +63,14 @@ def available_notification_actions():
|
||||
return actions
|
||||
|
||||
|
||||
def get_agent_class(agent_id=None, config=None, email_config=None, start_date=None, end_date=None,
|
||||
subject=None, body=None, message=None):
|
||||
def get_agent_class(newsletter_id=None, newsletter_id_name=None, agent_id=None, config=None, email_config=None,
|
||||
start_date=None, end_date=None, subject=None, body=None, message=None):
|
||||
if str(agent_id).isdigit():
|
||||
agent_id = int(agent_id)
|
||||
|
||||
kwargs = {'config': config,
|
||||
kwargs = {'newsletter_id': newsletter_id,
|
||||
'newsletter_id_name': newsletter_id_name,
|
||||
'config': config,
|
||||
'email_config': email_config,
|
||||
'start_date': start_date,
|
||||
'end_date': end_date,
|
||||
@@ -138,7 +140,9 @@ def get_newsletter_config(newsletter_id=None):
|
||||
subject = result.pop('subject')
|
||||
body = result.pop('body')
|
||||
message = result.pop('message')
|
||||
newsletter_agent = get_agent_class(agent_id=result['agent_id'], config=config, email_config=email_config,
|
||||
newsletter_agent = get_agent_class(newsletter_id=newsletter_id, newsletter_id_name=result['id_name'],
|
||||
agent_id=result['agent_id'],
|
||||
config=config, email_config=email_config,
|
||||
subject=subject, body=body, message=message)
|
||||
except Exception as e:
|
||||
logger.error(u"Tautulli Newsletters :: Failed to get newsletter config options: %s." % e)
|
||||
@@ -176,6 +180,7 @@ def add_newsletter_config(agent_id=None, **kwargs):
|
||||
values = {'agent_id': agent['id'],
|
||||
'agent_name': agent['name'],
|
||||
'agent_label': agent['label'],
|
||||
'id_name': '',
|
||||
'friendly_name': '',
|
||||
'newsletter_config': json.dumps(agent_class.config),
|
||||
'email_config': json.dumps(agent_class.email_config),
|
||||
@@ -223,13 +228,15 @@ def set_newsletter_config(newsletter_id=None, agent_id=None, **kwargs):
|
||||
body = kwargs.pop('body')
|
||||
message = kwargs.pop('message')
|
||||
|
||||
agent_class = get_agent_class(agent_id=agent['id'], config=newsletter_config, email_config=email_config,
|
||||
agent_class = get_agent_class(agent_id=agent['id'],
|
||||
config=newsletter_config, email_config=email_config,
|
||||
subject=subject, body=body, message=message)
|
||||
|
||||
keys = {'id': newsletter_id}
|
||||
values = {'agent_id': agent['id'],
|
||||
'agent_name': agent['name'],
|
||||
'agent_label': agent['label'],
|
||||
'id_name': kwargs.get('id_name', ''),
|
||||
'friendly_name': kwargs.get('friendly_name', ''),
|
||||
'newsletter_config': json.dumps(agent_class.config),
|
||||
'email_config': json.dumps(agent_class.email_config),
|
||||
@@ -267,8 +274,11 @@ def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, n
|
||||
|
||||
|
||||
def serve_template(templatename, **kwargs):
|
||||
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
|
||||
template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.NEWSLETTER_TEMPLATES)
|
||||
if plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR:
|
||||
template_dir = plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR
|
||||
else:
|
||||
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
|
||||
template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.NEWSLETTER_TEMPLATES)
|
||||
|
||||
_hplookup = TemplateLookup(directories=[template_dir], default_filters=['unicode', 'h'])
|
||||
|
||||
@@ -299,22 +309,27 @@ class Newsletter(object):
|
||||
'time_frame': 7,
|
||||
'time_frame_units': 'days',
|
||||
'formatted': 1,
|
||||
'notifier_id': 0}
|
||||
'notifier_id': 0,
|
||||
'filename': '',
|
||||
'save_only': 0}
|
||||
_DEFAULT_EMAIL_CONFIG = EMAIL().return_default_config()
|
||||
_DEFAULT_EMAIL_CONFIG['from_name'] = 'Tautulli Newsletter'
|
||||
_DEFAULT_EMAIL_CONFIG['notifier_id'] = 0
|
||||
_DEFAULT_SUBJECT = 'Tautulli Newsletter'
|
||||
_DEFAULT_BODY = 'View the newsletter here: {newsletter_url}'
|
||||
_DEFAULT_MESSAGE = ''
|
||||
_DEFAULT_FILENAME = 'newsletter_{newsletter_uuid}.html'
|
||||
_TEMPLATE_MASTER = ''
|
||||
_TEMPLATE = ''
|
||||
|
||||
def __init__(self, config=None, email_config=None, start_date=None, end_date=None,
|
||||
subject=None, body=None, message=None):
|
||||
def __init__(self, newsletter_id=None, newsletter_id_name=None, config=None, email_config=None,
|
||||
start_date=None, end_date=None, subject=None, body=None, message=None):
|
||||
self.config = self.set_config(config=config, default=self._DEFAULT_CONFIG)
|
||||
self.email_config = self.set_config(config=email_config, default=self._DEFAULT_EMAIL_CONFIG)
|
||||
self.uuid = generate_newsletter_uuid()
|
||||
|
||||
self.newsletter_id = newsletter_id
|
||||
self.newsletter_id_name = newsletter_id_name or ''
|
||||
self.start_date = None
|
||||
self.end_date = None
|
||||
|
||||
@@ -346,7 +361,13 @@ class Newsletter(object):
|
||||
self.subject = subject or self._DEFAULT_SUBJECT
|
||||
self.body = body or self._DEFAULT_BODY
|
||||
self.message = message or self._DEFAULT_MESSAGE
|
||||
self.filename = self.config['filename'] or self._DEFAULT_FILENAME
|
||||
|
||||
if not self.filename.endswith('.html'):
|
||||
self.filename += '.html'
|
||||
|
||||
self.subject_formatted, self.body_formatted, self.message_formatted = self.build_text()
|
||||
self.filename_formatted = self.build_filename()
|
||||
|
||||
self.data = {}
|
||||
self.newsletter = None
|
||||
@@ -421,13 +442,15 @@ class Newsletter(object):
|
||||
return False
|
||||
|
||||
self._save()
|
||||
|
||||
if self.config['save_only']:
|
||||
return True
|
||||
|
||||
return self._send()
|
||||
|
||||
def _save(self):
|
||||
newsletter_file = 'newsletter_%s-%s_%s.html' % (self.start_date.format('YYYYMMDD'),
|
||||
self.end_date.format('YYYYMMDD'),
|
||||
self.uuid)
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR
|
||||
newsletter_file = self.filename_formatted
|
||||
newsletter_folder = plexpy.CONFIG.NEWSLETTER_DIR or os.path.join(plexpy.DATA_DIR, 'newsletters')
|
||||
newsletter_file_fp = os.path.join(newsletter_folder, newsletter_file)
|
||||
|
||||
# In case the user has deleted it manually
|
||||
@@ -440,9 +463,9 @@ class Newsletter(object):
|
||||
if '<!-- IGNORE SAVE -->' not in line:
|
||||
n_file.write(line + '\r\n')
|
||||
|
||||
logger.info(u"Tautulli Newsletters :: %s newsletter saved to %s" % (self.NAME, newsletter_file))
|
||||
logger.info(u"Tautulli Newsletters :: %s newsletter saved to '%s'" % (self.NAME, newsletter_file))
|
||||
except OSError as e:
|
||||
logger.error(u"Tautulli Newsletters :: Failed to save %s newsletter to %s: %s"
|
||||
logger.error(u"Tautulli Newsletters :: Failed to save %s newsletter to '%s': %s"
|
||||
% (self.NAME, newsletter_file, e))
|
||||
|
||||
def _send(self):
|
||||
@@ -475,7 +498,10 @@ class Newsletter(object):
|
||||
def _build_params(self):
|
||||
date_format = helpers.momentjs_to_arrow(plexpy.CONFIG.DATE_FORMAT)
|
||||
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL or helpers.get_plexpy_url()
|
||||
if plexpy.CONFIG.NEWSLETTER_SELF_HOSTED and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
base_url = plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'newsletter/'
|
||||
else:
|
||||
base_url = helpers.get_plexpy_url() + '/newsletter/'
|
||||
|
||||
parameters = {
|
||||
'server_name': plexpy.CONFIG.PMS_NAME,
|
||||
@@ -484,8 +510,11 @@ class Newsletter(object):
|
||||
'week_number': self.start_date.isocalendar()[1],
|
||||
'newsletter_time_frame': self.config['time_frame'],
|
||||
'newsletter_time_frame_units': self.config['time_frame_units'],
|
||||
'newsletter_url': base_url.rstrip('/') + plexpy.HTTP_ROOT + 'newsletter/' + self.uuid,
|
||||
'newsletter_uuid': self.uuid
|
||||
'newsletter_url': base_url + self.uuid,
|
||||
'newsletter_static_url': base_url + 'id/' + self.newsletter_id_name,
|
||||
'newsletter_uuid': self.uuid,
|
||||
'newsletter_id': self.newsletter_id,
|
||||
'newsletter_id_name': self.newsletter_id_name
|
||||
}
|
||||
|
||||
return parameters
|
||||
@@ -529,6 +558,23 @@ class Newsletter(object):
|
||||
|
||||
return subject, body, message
|
||||
|
||||
def build_filename(self):
|
||||
from notification_handler import CustomFormatter
|
||||
custom_formatter = CustomFormatter()
|
||||
|
||||
try:
|
||||
filename = custom_formatter.format(unicode(self.filename), **self.parameters)
|
||||
except LookupError as e:
|
||||
logger.error(
|
||||
u"Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
|
||||
filename = unicode(self._DEFAULT_FILENAME).format(**self.parameters)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
u"Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
|
||||
filename = unicode(self._DEFAULT_FILENAME).format(**self.parameters)
|
||||
|
||||
return filename
|
||||
|
||||
def return_config_options(self):
|
||||
return self._return_config_options()
|
||||
|
||||
@@ -674,7 +720,7 @@ class RecentlyAdded(Newsletter):
|
||||
return recently_added
|
||||
|
||||
def retrieve_data(self):
|
||||
from notification_handler import get_imgur_info, set_hash_image_info
|
||||
from notification_handler import get_img_info, set_hash_image_info
|
||||
|
||||
if not self.config['incl_libraries']:
|
||||
logger.warn(u"Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
|
||||
@@ -692,7 +738,7 @@ class RecentlyAdded(Newsletter):
|
||||
artists = recently_added.get('artist', [])
|
||||
albums = [a for artist in artists for a in artist['album']]
|
||||
|
||||
if self.is_preview or plexpy.CONFIG.NEWSLETTER_SELF_HOSTED:
|
||||
if self.is_preview or helpers.get_img_service(include_self=True) == 'self-hosted':
|
||||
for item in movies + shows + albums:
|
||||
if item['media_type'] == 'album':
|
||||
height = 150
|
||||
@@ -714,8 +760,8 @@ class RecentlyAdded(Newsletter):
|
||||
item['poster_url'] = ''
|
||||
item['art_url'] = ''
|
||||
|
||||
else:
|
||||
# Upload posters and art to Imgur
|
||||
elif helpers.get_img_service():
|
||||
# Upload posters and art to image hosting service
|
||||
for item in movies + shows + albums:
|
||||
if item['media_type'] == 'album':
|
||||
height = 150
|
||||
@@ -724,17 +770,17 @@ class RecentlyAdded(Newsletter):
|
||||
height = 225
|
||||
fallback = 'poster'
|
||||
|
||||
imgur_info = get_imgur_info(
|
||||
img_info = get_img_info(
|
||||
img=item['thumb'], rating_key=item['rating_key'], title=item['title'],
|
||||
width=150, height=height, fallback=fallback)
|
||||
|
||||
item['poster_url'] = imgur_info.get('imgur_url') or common.ONLINE_POSTER_THUMB
|
||||
item['poster_url'] = img_info.get('img_url') or common.ONLINE_POSTER_THUMB
|
||||
|
||||
imgur_info = get_imgur_info(
|
||||
img_info = get_img_info(
|
||||
img=item['art'], rating_key=item['rating_key'], title=item['title'],
|
||||
width=500, height=280, opacity=25, background='282828', blur=3, fallback='art')
|
||||
|
||||
item['art_url'] = imgur_info.get('imgur_url')
|
||||
item['art_url'] = img_info.get('img_url')
|
||||
|
||||
item['thumb_hash'] = ''
|
||||
item['art_hash'] = ''
|
||||
@@ -797,4 +843,4 @@ class RecentlyAdded(Newsletter):
|
||||
}
|
||||
]
|
||||
|
||||
return config_options + additional_config
|
||||
return additional_config + config_options
|
||||
|
@@ -256,7 +256,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
|
||||
elif parameter_type == 'float':
|
||||
values = [helpers.cast_to_float(v) for v in values]
|
||||
|
||||
|
||||
except ValueError as e:
|
||||
logger.error(u"Tautulli NotificationHandler :: Unable to cast condition '%s', values '%s', to type '%s'."
|
||||
% (parameter, values, parameter_type))
|
||||
@@ -317,7 +317,9 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
|
||||
else:
|
||||
evaluated_logic = all(evaluated_conditions[1:])
|
||||
|
||||
logger.debug(u"Tautulli NotificationHandler :: Custom condition evaluated to '%s'." % str(evaluated_logic))
|
||||
logger.debug(u"Tautulli NotificationHandler :: Custom condition evaluated to '{}'. Conditions: {}.".format(
|
||||
evaluated_logic, evaluated_conditions[1:]))
|
||||
|
||||
return evaluated_logic
|
||||
|
||||
return True
|
||||
@@ -632,11 +634,12 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
else:
|
||||
poster_thumb = ''
|
||||
|
||||
if plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 1:
|
||||
imgur_info = get_imgur_info(img=poster_thumb, rating_key=poster_key, title=poster_title, fallback='poster')
|
||||
poster_info = {'poster_title': imgur_info['imgur_title'], 'poster_url': imgur_info['imgur_url']}
|
||||
img_service = helpers.get_img_service(include_self=True)
|
||||
if img_service not in (None, 'self-hosted'):
|
||||
img_info = get_img_info(img=poster_thumb, rating_key=poster_key, title=poster_title, fallback='poster')
|
||||
poster_info = {'poster_title': img_info['img_title'], 'poster_url': img_info['img_url']}
|
||||
notify_params.update(poster_info)
|
||||
elif plexpy.CONFIG.NOTIFY_UPLOAD_POSTERS == 2 and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
elif img_service == 'self-hosted' and plexpy.CONFIG.HTTP_BASE_URL:
|
||||
img_hash = set_hash_image_info(img=poster_thumb, fallback='poster')
|
||||
poster_info = {'poster_title': poster_title,
|
||||
'poster_url': plexpy.CONFIG.HTTP_BASE_URL + plexpy.HTTP_ROOT + 'image/' + img_hash}
|
||||
@@ -709,7 +712,7 @@ def build_media_notify_params(notify_action=None, session=None, timeline=None, m
|
||||
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': plexpy.CONFIG.PMS_VERSION,
|
||||
'action': notify_action.lstrip('on_'),
|
||||
'action': notify_action.split('on_')[-1],
|
||||
'week_number': arrow.now().isocalendar()[1],
|
||||
'datestamp': arrow.now().format(date_format),
|
||||
'timestamp': arrow.now().format(time_format),
|
||||
@@ -914,7 +917,7 @@ def build_server_notify_params(notify_action=None, **kwargs):
|
||||
'server_platform': plexpy.CONFIG.PMS_PLATFORM,
|
||||
'server_version': plexpy.CONFIG.PMS_VERSION,
|
||||
'server_machine_id': plexpy.CONFIG.PMS_IDENTIFIER,
|
||||
'action': notify_action.lstrip('on_'),
|
||||
'action': notify_action.split('on_')[-1],
|
||||
'datestamp': arrow.now().format(date_format),
|
||||
'timestamp': arrow.now().format(time_format),
|
||||
'unixtime': int(time.time()),
|
||||
@@ -1076,49 +1079,109 @@ def format_group_index(group_keys):
|
||||
return ','.join(num) or '0', ','.join(num00) or '00'
|
||||
|
||||
|
||||
def get_imgur_info(img=None, rating_key=None, title='', width=600, height=1000,
|
||||
opacity=100, background='000000', blur=0, fallback=None):
|
||||
imgur_info = {'imgur_title': '', 'imgur_url': ''}
|
||||
def get_img_info(img=None, rating_key=None, title='', width=1000, height=1500,
|
||||
opacity=100, background='000000', blur=0, fallback=None):
|
||||
img_info = {'img_title': '', 'img_url': ''}
|
||||
|
||||
image_info = {'img': img,
|
||||
'rating_key': rating_key,
|
||||
'width': width,
|
||||
'height': height,
|
||||
'opacity': opacity,
|
||||
'background': background,
|
||||
'blur': blur,
|
||||
'fallback': fallback}
|
||||
if not rating_key and not img:
|
||||
return img_info
|
||||
|
||||
if rating_key and not img:
|
||||
if fallback == 'art':
|
||||
img = '/library/metadata/{}/art'.format(rating_key)
|
||||
else:
|
||||
img = '/library/metadata/{}/thumb'.format(rating_key)
|
||||
|
||||
img_split = img.split('/')
|
||||
img = '/'.join(img_split[:5])
|
||||
rating_key = rating_key or img_split[3]
|
||||
|
||||
service = helpers.get_img_service()
|
||||
|
||||
if service is None:
|
||||
return img_info
|
||||
|
||||
elif service == 'cloudinary':
|
||||
if fallback == 'cover':
|
||||
w, h = 1000, 1000
|
||||
elif fallback == 'art':
|
||||
w, h = 1920, 1080
|
||||
else:
|
||||
w, h = 1000, 1500
|
||||
|
||||
image_info = {'img': img,
|
||||
'rating_key': rating_key,
|
||||
'width': w,
|
||||
'height': h,
|
||||
'opacity': 100,
|
||||
'background': '000000',
|
||||
'blur': 0,
|
||||
'fallback': fallback}
|
||||
|
||||
else:
|
||||
image_info = {'img': img,
|
||||
'rating_key': rating_key,
|
||||
'width': width,
|
||||
'height': height,
|
||||
'opacity': opacity,
|
||||
'background': background,
|
||||
'blur': blur,
|
||||
'fallback': fallback}
|
||||
|
||||
# Try to retrieve poster info from the database
|
||||
data_factory = datafactory.DataFactory()
|
||||
database_imgur_info = data_factory.get_imgur_info(**image_info)
|
||||
database_img_info = data_factory.get_img_info(service=service, **image_info)
|
||||
|
||||
if database_imgur_info:
|
||||
imgur_info = database_imgur_info[0]
|
||||
if database_img_info:
|
||||
img_info = database_img_info[0]
|
||||
|
||||
elif not database_imgur_info and img:
|
||||
elif not database_img_info and img:
|
||||
pms_connect = pmsconnect.PmsConnect()
|
||||
result = pms_connect.get_image(**image_info)
|
||||
result = pms_connect.get_image(refresh=True, **image_info)
|
||||
|
||||
if result and result[0]:
|
||||
imgur_url, delete_hash = helpers.upload_to_imgur(img_data=result[0],
|
||||
img_title=title,
|
||||
rating_key=rating_key,
|
||||
fallback=fallback)
|
||||
img_url = delete_hash = ''
|
||||
|
||||
if imgur_url:
|
||||
if service == 'imgur':
|
||||
img_url, delete_hash = helpers.upload_to_imgur(img_data=result[0],
|
||||
img_title=title,
|
||||
rating_key=rating_key,
|
||||
fallback=fallback)
|
||||
elif service == 'cloudinary':
|
||||
img_url = helpers.upload_to_cloudinary(img_data=result[0],
|
||||
img_title=title,
|
||||
rating_key=rating_key,
|
||||
fallback=fallback)
|
||||
|
||||
if img_url:
|
||||
img_hash = set_hash_image_info(**image_info)
|
||||
data_factory.set_imgur_info(img_hash=img_hash,
|
||||
imgur_title=title,
|
||||
imgur_url=imgur_url,
|
||||
delete_hash=delete_hash)
|
||||
data_factory.set_img_info(img_hash=img_hash,
|
||||
img_title=title,
|
||||
img_url=img_url,
|
||||
delete_hash=delete_hash,
|
||||
service=service)
|
||||
|
||||
imgur_info = {'imgur_title': title, 'imgur_url': imgur_url}
|
||||
img_info = {'img_title': title, 'img_url': img_url}
|
||||
|
||||
return imgur_info
|
||||
if img_info['img_url'] and service == 'cloudinary':
|
||||
# Transform image using Cloudinary
|
||||
image_info = {'rating_key': rating_key,
|
||||
'width': width,
|
||||
'height': height,
|
||||
'opacity': opacity,
|
||||
'background': background,
|
||||
'blur': blur,
|
||||
'fallback': fallback,
|
||||
'img_title': title}
|
||||
|
||||
transformed_url = helpers.cloudinary_transform(**image_info)
|
||||
if transformed_url:
|
||||
img_info['img_url'] = transformed_url
|
||||
|
||||
return img_info
|
||||
|
||||
|
||||
def set_hash_image_info(img=None, rating_key=None, width=600, height=1000,
|
||||
def set_hash_image_info(img=None, rating_key=None, width=750, height=1000,
|
||||
opacity=100, background='000000', blur=0, fallback=None):
|
||||
if not rating_key and not img:
|
||||
return fallback
|
||||
|
@@ -1301,6 +1301,7 @@ class EMAIL(Notifier):
|
||||
else:
|
||||
msg = MIMEText(body, 'plain', 'utf-8')
|
||||
|
||||
msg['Message-ID'] = email.utils.make_msgid()
|
||||
msg['Date'] = email.utils.formatdate(localtime=True)
|
||||
msg['Subject'] = subject
|
||||
msg['From'] = email.utils.formataddr((self.config['from_name'], self.config['from']))
|
||||
@@ -1309,26 +1310,30 @@ class EMAIL(Notifier):
|
||||
|
||||
recipients = self.config['to'] + self.config['cc'] + self.config['bcc']
|
||||
|
||||
success = False
|
||||
mailserver = smtplib.SMTP(self.config['smtp_server'], self.config['smtp_port'])
|
||||
|
||||
try:
|
||||
mailserver = smtplib.SMTP(self.config['smtp_server'], self.config['smtp_port'])
|
||||
mailserver.ehlo()
|
||||
|
||||
if self.config['tls']:
|
||||
mailserver.starttls()
|
||||
|
||||
mailserver.ehlo()
|
||||
mailserver.ehlo()
|
||||
|
||||
if self.config['smtp_user']:
|
||||
mailserver.login(str(self.config['smtp_user']), str(self.config['smtp_password']))
|
||||
|
||||
mailserver.sendmail(self.config['from'], recipients, msg.as_string())
|
||||
mailserver.quit()
|
||||
|
||||
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
|
||||
return True
|
||||
success = True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(u"Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
|
||||
return False
|
||||
|
||||
finally:
|
||||
mailserver.quit()
|
||||
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
|
||||
|
||||
return success
|
||||
|
||||
def get_user_emails(self):
|
||||
emails = {u['email']: u['friendly_name'] for u in users.Users().get_users() if u['email']}
|
||||
@@ -2795,24 +2800,53 @@ class PUSHOVER(Notifier):
|
||||
return self.make_request('https://api.pushover.net/1/messages.json', headers=headers, data=data, files=files)
|
||||
|
||||
def get_sounds(self):
|
||||
if self.config['api_token']:
|
||||
params = {'token': self.config['api_token']}
|
||||
sounds = {
|
||||
'': '',
|
||||
'alien': 'Alien Alarm (long)',
|
||||
'bike': 'Bike',
|
||||
'bugle': 'Bugle',
|
||||
'cashregister': 'Cash Register',
|
||||
'classical': 'Classical',
|
||||
'climb': 'Climb (long)',
|
||||
'cosmic': 'Cosmic',
|
||||
'echo': 'Pushover Echo (long)',
|
||||
'falling': 'Falling',
|
||||
'gamelan': 'Gamelan',
|
||||
'incoming': 'Incoming',
|
||||
'intermission': 'Intermission',
|
||||
'magic': 'Magic',
|
||||
'mechanical': 'Mechanical',
|
||||
'none': 'None (silent)',
|
||||
'persistent': 'Persistent (long)',
|
||||
'pianobar': 'Piano Bar',
|
||||
'pushover': 'Pushover (default)',
|
||||
'siren': 'Siren',
|
||||
'spacealarm': 'Space Alarm',
|
||||
'tugboat': 'Tug Boat',
|
||||
'updown': 'Up Down (long)'
|
||||
}
|
||||
|
||||
r = requests.get('https://api.pushover.net/1/sounds.json', params=params)
|
||||
return sounds
|
||||
|
||||
if r.status_code == 200:
|
||||
response_data = r.json()
|
||||
sounds = response_data.get('sounds', {})
|
||||
sounds.update({'': ''})
|
||||
return sounds
|
||||
else:
|
||||
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} sounds list: "
|
||||
u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
return {'': ''}
|
||||
|
||||
else:
|
||||
return {'': ''}
|
||||
# if self.config['api_token']:
|
||||
# params = {'token': self.config['api_token']}
|
||||
#
|
||||
# r = requests.get('https://api.pushover.net/1/sounds.json', params=params)
|
||||
#
|
||||
# if r.status_code == 200:
|
||||
# response_data = r.json()
|
||||
# sounds = response_data.get('sounds', {})
|
||||
# sounds.update({'': ''})
|
||||
# print sounds
|
||||
# return sounds
|
||||
# else:
|
||||
# logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} sounds list: "
|
||||
# u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
|
||||
# logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
|
||||
# return {'': ''}
|
||||
#
|
||||
# else:
|
||||
# return {'': ''}
|
||||
|
||||
def return_config_options(self):
|
||||
config_option = [{'label': 'Pushover API Token',
|
||||
@@ -2934,6 +2968,7 @@ class SCRIPTS(Notifier):
|
||||
}
|
||||
|
||||
self.arg_overrides = ('python2', 'python3', 'python', 'pythonw', 'php', 'ruby', 'perl')
|
||||
self.script_killed = False
|
||||
|
||||
def list_scripts(self):
|
||||
scriptdir = self.config['script_folder']
|
||||
@@ -2953,12 +2988,6 @@ class SCRIPTS(Notifier):
|
||||
return scripts
|
||||
|
||||
def run_script(self, script):
|
||||
def kill_script(process):
|
||||
logger.warn(u"Tautulli Notifiers :: Script exceeded timeout limit of %d seconds. "
|
||||
"Script killed." % self.config['timeout'])
|
||||
process.kill()
|
||||
self.script_killed = True
|
||||
|
||||
# Common environment variables
|
||||
env = {'PLEX_URL': plexpy.CONFIG.PMS_URL,
|
||||
'PLEX_TOKEN': plexpy.CONFIG.PMS_TOKEN,
|
||||
@@ -2967,8 +2996,6 @@ class SCRIPTS(Notifier):
|
||||
}
|
||||
env.update(os.environ)
|
||||
|
||||
self.script_killed = False
|
||||
output = error = ''
|
||||
try:
|
||||
process = subprocess.Popen(script,
|
||||
stdin=subprocess.PIPE,
|
||||
@@ -2978,7 +3005,7 @@ class SCRIPTS(Notifier):
|
||||
env=env)
|
||||
|
||||
if self.config['timeout'] > 0:
|
||||
timer = threading.Timer(self.config['timeout'], kill_script, (process,))
|
||||
timer = threading.Timer(self.config['timeout'], self.kill_script, (process,))
|
||||
else:
|
||||
timer = None
|
||||
|
||||
@@ -3008,6 +3035,12 @@ class SCRIPTS(Notifier):
|
||||
logger.info(u"Tautulli Notifiers :: Script notification sent.")
|
||||
return True
|
||||
|
||||
def kill_script(self, process):
|
||||
process.kill()
|
||||
self.script_killed = True
|
||||
logger.warn(u"Tautulli Notifiers :: Script exceeded timeout limit of %d seconds. "
|
||||
"Script killed." % self.config['timeout'])
|
||||
|
||||
def agent_notify(self, subject='', body='', action='', **kwargs):
|
||||
"""
|
||||
Args:
|
||||
@@ -3434,6 +3467,10 @@ class TWITTER(Notifier):
|
||||
parameters = kwargs['parameters']
|
||||
poster_url = parameters.get('poster_url','')
|
||||
|
||||
# Hack to add media type to attachment
|
||||
if poster_url:
|
||||
poster_url += '.png'
|
||||
|
||||
if self.config['incl_subject']:
|
||||
return self._send_tweet(subject + '\r\n' + body, attachment=poster_url)
|
||||
else:
|
||||
|
@@ -2435,8 +2435,8 @@ class PmsConnect(object):
|
||||
|
||||
return labels_list
|
||||
|
||||
def get_image(self, img=None, width=600, height=1000, opacity=None, background=None, blur=None,
|
||||
img_format='png', clip=False, **kwargs):
|
||||
def get_image(self, img=None, width=1000, height=1500, opacity=None, background=None, blur=None,
|
||||
img_format='png', clip=False, refresh=False, **kwargs):
|
||||
"""
|
||||
Return image data as array.
|
||||
Array contains the image content type and image binary
|
||||
@@ -2450,10 +2450,13 @@ class PmsConnect(object):
|
||||
Output: array
|
||||
"""
|
||||
|
||||
width = width or 600
|
||||
height = height or 1000
|
||||
width = width or 1000
|
||||
height = height or 1500
|
||||
|
||||
if img:
|
||||
if refresh:
|
||||
img = '{}/{}'.format(img.rstrip('/'), int(time.time()))
|
||||
|
||||
if clip:
|
||||
params = {'url': '%s&%s' % (img, urllib.urlencode({'X-Plex-Token': self.token}))}
|
||||
else:
|
||||
@@ -2544,7 +2547,7 @@ class PmsConnect(object):
|
||||
metadata = self.get_metadata_details(rating_key=rating_key)
|
||||
search_results_list[metadata['media_type']].append(metadata)
|
||||
|
||||
output = {'results_count': sum(len(s) for s in search_results_list.items()),
|
||||
output = {'results_count': sum(len(s) for s in search_results_list.values()),
|
||||
'results_list': search_results_list
|
||||
}
|
||||
|
||||
|
@@ -1,2 +1,2 @@
|
||||
PLEXPY_BRANCH = "beta"
|
||||
PLEXPY_RELEASE_VERSION = "v2.1.1-beta"
|
||||
PLEXPY_RELEASE_VERSION = "v2.1.6-beta"
|
||||
|
@@ -131,7 +131,7 @@ def getVersion():
|
||||
return None, 'origin', common.BRANCH
|
||||
|
||||
|
||||
def checkGithub(auto_update=False):
|
||||
def check_github(auto_update=False, notify=False):
|
||||
plexpy.COMMITS_BEHIND = 0
|
||||
|
||||
# Get the latest version available from github
|
||||
@@ -198,8 +198,11 @@ def checkGithub(auto_update=False):
|
||||
|
||||
plexpy.LATEST_RELEASE = release['tag_name']
|
||||
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate', 'plexpy_download_info': release,
|
||||
'plexpy_update_commit': plexpy.LATEST_VERSION, 'plexpy_update_behind': plexpy.COMMITS_BEHIND})
|
||||
if notify:
|
||||
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpyupdate',
|
||||
'plexpy_download_info': release,
|
||||
'plexpy_update_commit': plexpy.LATEST_VERSION,
|
||||
'plexpy_update_behind': plexpy.COMMITS_BEHIND})
|
||||
|
||||
if auto_update:
|
||||
logger.info('Running automatic update.')
|
||||
|
@@ -2739,6 +2739,9 @@ class WebInterface(object):
|
||||
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES),
|
||||
"git_token": plexpy.CONFIG.GIT_TOKEN,
|
||||
"imgur_client_id": plexpy.CONFIG.IMGUR_CLIENT_ID,
|
||||
"cloudinary_cloud_name": plexpy.CONFIG.CLOUDINARY_CLOUD_NAME,
|
||||
"cloudinary_api_key": plexpy.CONFIG.CLOUDINARY_API_KEY,
|
||||
"cloudinary_api_secret": plexpy.CONFIG.CLOUDINARY_API_SECRET,
|
||||
"cache_images": checked(plexpy.CONFIG.CACHE_IMAGES),
|
||||
"pms_version": plexpy.CONFIG.PMS_VERSION,
|
||||
"plexpy_auto_update": checked(plexpy.CONFIG.PLEXPY_AUTO_UPDATE),
|
||||
@@ -2752,7 +2755,8 @@ class WebInterface(object):
|
||||
"tvmaze_lookup": checked(plexpy.CONFIG.TVMAZE_LOOKUP),
|
||||
"show_advanced_settings": plexpy.CONFIG.SHOW_ADVANCED_SETTINGS,
|
||||
"newsletter_dir": plexpy.CONFIG.NEWSLETTER_DIR,
|
||||
"newsletter_self_hosted": checked(plexpy.CONFIG.NEWSLETTER_SELF_HOSTED)
|
||||
"newsletter_self_hosted": checked(plexpy.CONFIG.NEWSLETTER_SELF_HOSTED),
|
||||
"newsletter_custom_dir": plexpy.CONFIG.NEWSLETTER_CUSTOM_DIR
|
||||
}
|
||||
|
||||
return serve_template(templatename="settings.html", title="Settings", config=config, kwargs=kwargs)
|
||||
@@ -3699,7 +3703,7 @@ class WebInterface(object):
|
||||
}
|
||||
```
|
||||
"""
|
||||
versioncheck.checkGithub()
|
||||
versioncheck.check_github()
|
||||
|
||||
if not plexpy.CURRENT_VERSION:
|
||||
return {'result': 'error',
|
||||
@@ -3959,13 +3963,20 @@ class WebInterface(object):
|
||||
return
|
||||
|
||||
if rating_key and not img:
|
||||
img = '/library/metadata/%s/thumb/1337' % rating_key
|
||||
if fallback == 'art':
|
||||
img = '/library/metadata/{}/art'.format(rating_key)
|
||||
else:
|
||||
img = '/library/metadata/{}/thumb'.format(rating_key)
|
||||
|
||||
img_string = img.rsplit('/', 1)[0] if '/library/metadata' in img else img
|
||||
img_string = '{}{}{}{}{}{}'.format(img_string, width, height, opacity, background, blur)
|
||||
img_split = img.split('/')
|
||||
img = '/'.join(img_split[:5])
|
||||
rating_key = rating_key or img_split[3]
|
||||
|
||||
fp = hashlib.md5(img_string).hexdigest()
|
||||
fp += '.%s' % img_format # we want to be able to preview the thumbs
|
||||
img_string = '{}.{}.{}.{}.{}.{}.{}.{}'.format(
|
||||
plexpy.CONFIG.PMS_UUID, img, rating_key, width, height, opacity, background, blur, fallback)
|
||||
img_hash = hashlib.sha256(img_string).hexdigest()
|
||||
|
||||
fp = '{}.{}'.format(img_hash, img_format) # we want to be able to preview the thumbs
|
||||
c_dir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images')
|
||||
ffp = os.path.join(c_dir, fp)
|
||||
|
||||
@@ -3991,7 +4002,8 @@ class WebInterface(object):
|
||||
background=background,
|
||||
blur=blur,
|
||||
img_format=img_format,
|
||||
clip=clip)
|
||||
clip=clip,
|
||||
refresh=refresh)
|
||||
|
||||
if result and result[0]:
|
||||
cherrypy.response.headers['Content-type'] = result[1]
|
||||
@@ -4020,7 +4032,7 @@ class WebInterface(object):
|
||||
@cherrypy.expose
|
||||
def image(self, *args, **kwargs):
|
||||
if args:
|
||||
img_hash = args[0]
|
||||
img_hash = args[0].split('.')[0]
|
||||
|
||||
if img_hash in ('poster', 'cover', 'art'):
|
||||
if img_hash == 'poster':
|
||||
@@ -4158,30 +4170,31 @@ class WebInterface(object):
|
||||
@cherrypy.tools.json_out()
|
||||
@requireAuth(member_of("admin"))
|
||||
@addtoapi()
|
||||
def delete_imgur_poster(self, rating_key='', **kwargs):
|
||||
""" Delete the Imgur poster.
|
||||
def delete_hosted_images(self, rating_key='', service='', **kwargs):
|
||||
""" Delete the images uploaded to image hosting services.
|
||||
|
||||
```
|
||||
Required parameters:
|
||||
rating_key (int): 1234
|
||||
(Note: Must be the movie, show, season, artist, or album rating key)
|
||||
Optional parameters:
|
||||
None
|
||||
service (str): imgur or cloudinary
|
||||
(Note: Defaults to service in Image Hosting setting)
|
||||
|
||||
Returns:
|
||||
json:
|
||||
{"result": "success",
|
||||
"message": "Deleted Imgur poster."}
|
||||
"message": "Deleted hosted images from Imgur."}
|
||||
```
|
||||
"""
|
||||
|
||||
data_factory = datafactory.DataFactory()
|
||||
result = data_factory.delete_imgur_info(rating_key=rating_key)
|
||||
result = data_factory.delete_img_info(rating_key=rating_key, service=service)
|
||||
|
||||
if result:
|
||||
return {'result': 'success', 'message': 'Deleted Imgur poster.'}
|
||||
return {'result': 'success', 'message': 'Deleted hosted images from %s.' % result.capitalize()}
|
||||
else:
|
||||
return {'result': 'error', 'message': 'Failed to delete Imgur poster.'}
|
||||
return {'result': 'error', 'message': 'Failed to delete hosted images.'}
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.tools.json_out()
|
||||
@@ -5649,10 +5662,18 @@ class WebInterface(object):
|
||||
except NotFound:
|
||||
return
|
||||
|
||||
cherrypy.response.headers['Cache-Control'] = 'max-age=2592000' # 30 days
|
||||
return self.image(args[1], refresh=True)
|
||||
|
||||
newsletter_uuid = args[0]
|
||||
newsletter = newsletter_handler.get_newsletter(newsletter_uuid=newsletter_uuid)
|
||||
if len(args) >= 2 and args[0] == 'id':
|
||||
newsletter_id_name = args[1]
|
||||
newsletter_uuid = None
|
||||
else:
|
||||
newsletter_id_name = None
|
||||
newsletter_uuid = args[0]
|
||||
|
||||
newsletter = newsletter_handler.get_newsletter(newsletter_uuid=newsletter_uuid,
|
||||
newsletter_id_name=newsletter_id_name)
|
||||
return newsletter
|
||||
|
||||
@cherrypy.expose
|
||||
@@ -5671,7 +5692,9 @@ class WebInterface(object):
|
||||
newsletter = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
|
||||
|
||||
if newsletter:
|
||||
newsletter_agent = newsletters.get_agent_class(agent_id=newsletter['agent_id'],
|
||||
newsletter_agent = newsletters.get_agent_class(newsletter_id=newsletter_id,
|
||||
newsletter_id_name=newsletter['id_name'],
|
||||
agent_id=newsletter['agent_id'],
|
||||
config=newsletter['config'],
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
|
Reference in New Issue
Block a user