update of packages
@@ -54,6 +54,7 @@
(require 'org-bibtex)) ; org-bibtex-yank

(require 'url-http)
(require 'url-handlers)
(require 'org-ref-utils)
(require 'hydra)

@@ -213,10 +214,23 @@ must return a pdf-url, or nil.")
;; http://onlinelibrary.wiley.com/doi/pdf/10.1002/anie.201402680
;; Hence fewer steps are now required.

(defun wiley-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
;; https://onlinelibrary.wiley.com/doi/10.1002/adts.202200926
;; https://onlinelibrary.wiley.com/doi/epdf/10.1002/adts.202200926

;; (defun wiley-pdf-url (*doi-utils-redirect*)
;; "Get url to the pdf from *DOI-UTILS-REDIRECT*."
;; (when (string-match "^http\\(s?\\)://onlinelibrary.wiley.com" *doi-utils-redirect*)
;; (replace-regexp-in-string "doi/abs" "doi/pdf" *doi-utils-redirect*)))


(defun wiley-pdf-url-2 (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*.
[2023-04-10 Mon] updated a new rule.
https://onlinelibrary.wiley.com/doi/pdfdirect/10.1002/anie.201310461?download=true"
(when (string-match "^http\\(s?\\)://onlinelibrary.wiley.com" *doi-utils-redirect*)
(replace-regexp-in-string "doi/abs" "doi/pdf" *doi-utils-redirect*)))
(concat
(replace-regexp-in-string "doi/" "doi/pdfdirect/" *doi-utils-redirect*)
"?download=true")))

(defun agu-pdf-url (*doi-utils-redirect*)

@@ -340,39 +354,66 @@ must return a pdf-url, or nil.")
url)))

;;** Science Direct
(defun doi-utils-get-science-direct-pdf-url (redirect-url)
"Science direct hides the pdf url in html. We get it out here.
REDIRECT-URL is where the pdf url will be in."
(let ((first-url
(with-current-buffer (url-retrieve-synchronously redirect-url)
(goto-char (point-min))
(when (re-search-forward "pdf_url\" content=\"\\([^\"]*\\)\"" nil t)
(match-string-no-properties 1)))))
(and first-url
(with-current-buffer (url-retrieve-synchronously first-url)
(goto-char (point-min))
(when (re-search-forward "or click <a href=\"\\([^\"]*\\)\">" nil t)
(match-string-no-properties 1))))))
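;; Descriptive note (added for clarity, not in the original source): the helper
;; above makes two requests -- it pulls the pdf_url meta tag out of the html at
;; REDIRECT-URL, then fetches that url and follows the "or click <a href=...>"
;; link on the intermediate page to reach the final pdf location.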

;; https://www.sciencedirect.com/science/article/pii/S001085452200577X?via%3Dihub
;; https://www.sciencedirect.com/science/article/pii/S001085452200577X/pdfft?isDTMRedir=true&download=true

(defun science-direct-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
(when (string-match "^http\\(s?\\)://www.sciencedirect.com" *doi-utils-redirect*)
(doi-utils-get-science-direct-pdf-url *doi-utils-redirect*)))
(replace-string "?via%3Dihub" "/pdfft?isDTMRedir=true&download=true" *doi-utils-redirect*)))
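;; Illustration (assumption based on the comment urls above): the ?via%3Dihub
;; suffix on an article url is swapped for /pdfft?isDTMRedir=true&download=true
;; to point directly at the pdf.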

;; (defun doi-utils-get-science-direct-pdf-url (redirect-url)
;; "Science direct hides the pdf url in html. We get it out here.
;; REDIRECT-URL is where the pdf url will be in."
;; (let ((first-url
;; (with-current-buffer (url-retrieve-synchronously redirect-url)
;; (goto-char (point-min))
;; (when (re-search-forward "pdf_url\" content=\"\\([^\"]*\\)\"" nil t)
;; (match-string-no-properties 1)))))
;; (and first-url
;; (with-current-buffer (url-retrieve-synchronously first-url)
;; (goto-char (point-min))
;; (when (re-search-forward "or click <a href=\"\\([^\"]*\\)\">" nil t)
;; (match-string-no-properties 1))))))

;; (defun science-direct-pdf-url (*doi-utils-redirect*)
;; "Get url to the pdf from *DOI-UTILS-REDIRECT*."
;; (when (string-match "^http\\(s?\\)://www.sciencedirect.com" *doi-utils-redirect*)
;; (doi-utils-get-science-direct-pdf-url *doi-utils-redirect*)))

;; sometimes I get
;; http://linkinghub.elsevier.com/retrieve/pii/S0927025609004558
;; which actually redirect to
;; http://www.sciencedirect.com/science/article/pii/S0927025609004558

;; https://www.sciencedirect.com/science/article/pii/S001085452200577X?via%3Dihub
;; https://www.sciencedirect.com/science/article/pii/S001085452200577X/pdfft?isDTMRedir=true&download=true

;; (defun linkinghub-elsevier-pdf-url (*doi-utils-redirect*)
;; "Get url to the pdf from *DOI-UTILS-REDIRECT*."
;; (when (string-match
;; "^https://linkinghub.elsevier.com/retrieve" *doi-utils-redirect*)
;; (science-direct-pdf-url
;; (replace-regexp-in-string
;; ;; change URL to science direct and use function to get pdf URL
;; "https://linkinghub.elsevier.com/retrieve"
;; "https://www.sciencedirect.com/science/article"
;; *doi-utils-redirect*))))

;; https://www.sciencedirect.com/science/article/pii/S1385894723014973/pdfft?isDTMRedir=true&download=true

(defun linkinghub-elsevier-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
(when (string-match
"^https://linkinghub.elsevier.com/retrieve" *doi-utils-redirect*)
(doi-utils-get-science-direct-pdf-url
(concat
(replace-regexp-in-string
;; change URL to science direct and use function to get pdf URL
"https://linkinghub.elsevier.com/retrieve"
"https://www.sciencedirect.com/science/article"
*doi-utils-redirect*))))
*doi-utils-redirect*)
"/pdfft?isDTMRedir=true")))

;;** PNAS
;; http://www.pnas.org/content/early/2014/05/08/1319030111

@@ -550,9 +591,11 @@ REDIRECT-URL is where the pdf url will be in."


;;** ASME Biomechanical Journal
;;** Publishers using Highwire Press metatags
;; For context and details, see:
;; https://webmasters.stackexchange.com/questions/72746/where-are-the-complete-set-of-highwire-press-metatags-defined

(defun asme-biomechanical-pdf-url (*doi-utils-redirect*)
(defun highwire-pdf-url (*doi-utils-redirect*)
"Typical URL: http://biomechanical.asmedigitalcollection.asme.org/article.aspx?articleid=1427237
On this page the pdf might be here: <meta name=\"citation_author\" content=\"Dalong Li\" /><meta name=\"citation_author_email\" content=\"dal40@pitt.edu\" /><meta name=\"citation_author\" content=\"Anne M. Robertson\" /><meta name=\"citation_author_email\" content=\"rbertson@pitt.edu\" /><meta name=\"citation_title\" content=\"A Structural Multi-Mechanism Damage Model for Cerebral Arterial Tissue\" /><meta name=\"citation_firstpage\" content=\"101013\" /><meta name=\"citation_doi\" content=\"10.1115/1.3202559\" /><meta name=\"citation_keyword\" content=\"Mechanisms\" /><meta name=\"citation_keyword\" content=\"Biological tissues\" /><meta name=\"citation_keyword\" content=\"Stress\" /><meta name=\"citation_keyword\" content=\"Fibers\" /><meta name=\"citation_journal_title\" content=\"Journal of Biomechanical Engineering\" /><meta name=\"citation_journal_abbrev\" content=\"J Biomech Eng\" /><meta name=\"citation_volume\" content=\"131\" /><meta name=\"citation_issue\" content=\"10\" /><meta name=\"citation_publication_date\" content=\"2009/10/01\" /><meta name=\"citation_issn\" content=\"0148-0731\" /><meta name=\"citation_publisher\" content=\"American Society of Mechanical Engineers\" /><meta name=\"citation_pdf_url\" content=\"http://biomechanical.asmedigitalcollection.asme.org/data/journals/jbendy/27048/101013_1.pdf\" />
@@ -562,13 +605,17 @@ It is in the citation_pdf_url.
It would be better to parse this, but here I just use a regexp.
"

(when (string-match "^http\\(s?\\)://biomechanical.asmedigitalcollection.asme.org" *doi-utils-redirect*)
(when (or (string-match "^http\\(s?\\)://biomechanical.asmedigitalcollection.asme.org" *doi-utils-redirect*)
(string-match "^http\\(s?\\)://ojs.aaai.org" *doi-utils-redirect*)
(string-match "^http\\(s?\\)://aclanthology.org" *doi-utils-redirect*))
(setq *doi-utils-waiting* 0)
(url-retrieve
*doi-utils-redirect*
(lambda (status)
(goto-char (point-min))
(re-search-forward "citation_pdf_url\" content=\"\\(.*\\)\"" nil t)
(or (progn (goto-char (point-min))
(re-search-forward "citation_pdf_url\"? content=\"\\(.*\\)\"" nil t))
(progn (goto-char (point-min))
(re-search-forward "\"\\([^\"]*\\)\" name=\"?citation_pdf_url" nil t)))
;; (message-box (match-string 1))
(setq *doi-utils-pdf-url* (match-string 1)
*doi-utils-waiting* nil)))
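;; Clarifying note (not in the original): the first regexp matches the common
;; form <meta name="citation_pdf_url" content="...pdf"/> shown in the docstring
;; above; the second handles pages that emit the content attribute before
;; name="citation_pdf_url".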
@@ -592,6 +639,47 @@ It would be better to parse this, but here I just use a regexp.
(concat (replace-regexp-in-string (regexp-quote "/article?id=") "/article/file?id=" *doi-utils-redirect*) "&type=printable")))


;; https://www.frontiersin.org/articles/10.3389/fchem.2022.1037997/full
;; https://www.frontiersin.org/articles/10.3389/fchem.2022.1037997/pdf
(defun frontiers-pdf-url (*doi-utils-redirect*)
(when (string-match "^http\\(s*\\)://www.frontiersin.org" *doi-utils-redirect*)
(replace-regexp-in-string "/full" "/pdf" *doi-utils-redirect*)))


;; https://chemistry-europe.onlinelibrary.wiley.com/doi/10.1002/celc.201902035
;; https://chemistry-europe.onlinelibrary.wiley.com/doi/epdf/10.1002/celc.201902035
(defun chemistry-europe-pdf-url (*doi-utils-redirect*)
(when (string-match "^http\\(s*\\)://chemistry-europe.onlinelibrary.wiley.com" *doi-utils-redirect*)
(concat
(replace-regexp-in-string "/doi" "/doi/pdfdirect" *doi-utils-redirect*)
"?download=true")))


;; ** from issue #1081

(defun arxiv-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
(when (string-match-p "^https?://arxiv\\.org" *doi-utils-redirect*)
(concat (replace-regexp-in-string "/abs/" "/pdf/" *doi-utils-redirect*)
".pdf")))


(defun rss-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
(when (string-match-p "roboticsproceedings" *doi-utils-redirect*)
(concat (replace-regexp-in-string "\\.html" ".pdf" *doi-utils-redirect*))))
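;; Illustration with a hypothetical proceedings url (not part of this change):
;; a redirect such as http://www.roboticsproceedings.org/rss18/p001.html would be
;; rewritten to the matching .pdf url.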


(defun ieeestamp-pdf-url (*doi-utils-redirect*)
"Get url to the pdf from *DOI-UTILS-REDIRECT*."
(when (string-match "^https?://ieeexplore\\.ieee\\.org/document/\\([0-9]+\\)"
*doi-utils-redirect*)
(concat "https://ieeexplore.ieee.org/stampPDF/getPDF.jsp?tp=&arnumber="
(match-string 1 *doi-utils-redirect*))))
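;; Illustration with a hypothetical document number (not part of this change):
;; (ieeestamp-pdf-url "https://ieeexplore.ieee.org/document/9387109")
;; => "https://ieeexplore.ieee.org/stampPDF/getPDF.jsp?tp=&arnumber=9387109"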


;;** Add all functions

(setq doi-utils-pdf-url-functions
@@ -599,7 +687,8 @@ It would be better to parse this, but here I just use a regexp.
'aps-pdf-url
'science-pdf-url
'nature-pdf-url
'wiley-pdf-url
;; 'wiley-pdf-url
'wiley-pdf-url-2
'springer-chapter-pdf-url
'springer-pdf-url
'acs-pdf-url-1
@@ -624,11 +713,16 @@ It would be better to parse this, but here I just use a regexp.
'ieee3-pdf-url
'acm-pdf-url
'osa-pdf-url
'asme-biomechanical-pdf-url
'highwire-pdf-url
'siam-pdf-url
'agu-pdf-url
'plos-pdf-url
'generic-full-pdf-url))
'frontiers-pdf-url
'chemistry-europe-pdf-url
'generic-full-pdf-url
'arxiv-pdf-url
'rss-pdf-url
'ieeestamp-pdf-url))
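;; Sketch (not part of this change): a user-defined resolver, here named
;; my-publisher-pdf-url purely as a placeholder, could be appended with
;; (add-to-list 'doi-utils-pdf-url-functions 'my-publisher-pdf-url t)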

;;** Get the pdf url for a doi

@@ -811,11 +905,10 @@ every field.")
(json-object-type 'plist)
(json-data)
(url (concat doi-utils-dx-doi-org-url doi)))
(with-current-buffer
(url-retrieve-synchronously
;; (concat "http://dx.doi.org/" doi)
url)
(setq json-data (buffer-substring url-http-end-of-headers (point-max)))
(with-temp-buffer
(url-insert
(url-retrieve-synchronously url))
(setq json-data (buffer-string))
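;; Note added for clarity (not in the diff): url-insert comes from url-handlers,
;; required at the top of the file, and inserts only the decoded response body,
;; so the buffer-substring on url-http-end-of-headers above is no longer needed.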

(when (or (string-match "<title>Error: DOI Not Found</title>" json-data)
(string-match "Resource not found" json-data)
@@ -1011,7 +1104,8 @@ MATCHING-TYPES."
(bibtex-set-field doi-utils-timestamp-field
ts)))
(org-ref-clean-bibtex-entry)
(save-buffer))
(when (buffer-file-name)
(save-buffer)))


;;;###autoload
@@ -1020,7 +1114,7 @@ MATCHING-TYPES."
Pick the file ending with .bib or in . If you have an active region that
starts like a DOI, that will be the initial prompt. If no region
is selected and the first entry of the ‘kill-ring’ starts like a
DOI, then that is the intial prompt. Otherwise, you have to type
DOI, then that is the initial prompt. Otherwise, you have to type
or paste in a DOI.
Argument BIBFILE the bibliography to use."
(interactive
@@ -1478,10 +1572,11 @@ Get a list of possible matches. Choose one with completion."
(let ((url-request-method "GET")
(url-mime-accept-string "application/citeproc+json")
(json-data))
(with-current-buffer
(url-retrieve-synchronously
(concat doi-utils-dx-doi-org-url doi))
(setq json-data (buffer-substring url-http-end-of-headers (point-max)))
(with-temp-buffer
(url-insert
(url-retrieve-synchronously
(concat doi-utils-dx-doi-org-url doi)))
(setq json-data (buffer-string))
(if (string-match "Resource not found" json-data)
(progn
(browse-url (concat doi-utils-dx-doi-org-url doi))
@@ -1502,10 +1597,11 @@ Get a list of possible matches. Choose one with completion."
(let ((url-request-method "GET")
(url-mime-accept-string "application/citeproc+json"))
(pp
(json-read-from-string (with-current-buffer
(url-retrieve-synchronously
(concat doi-utils-dx-doi-org-url doi))
(buffer-substring url-http-end-of-headers (point-max))))))
(json-read-from-string (with-temp-buffer
(url-insert
(url-retrieve-synchronously
(concat doi-utils-dx-doi-org-url doi)))
(buffer-string)))))
"\n\n")
(goto-char (point-min)))

@@ -1536,48 +1632,40 @@ Get a list of possible matches. Choose one with completion."
"Bibfile: "
(append (f-entries "." (lambda (f) (f-ext? f "bib")))
bibtex-completion-bibliography))))
(let* ((raw-json-string)
(json-string)
(json-data)
(doi))
(let* ((json-data (with-temp-buffer
(url-insert
(url-retrieve-synchronously
(concat
"https://api.crossref.org/works?query="
(url-hexify-string query))))

(json-read-from-string (buffer-string))))
(name (format "Crossref hits for %s"
;; remove carriage returns. They can make completion confusing.
(replace-regexp-in-string "\n" " " query)))
(candidates (let-alist json-data
(cl-loop for item across .message.items
collect (let-alist item
(cons (format "%s, %s, %s, %s."
(string-join .title " ")
(string-join
(cl-loop for author across .author collect
(let-alist author
(format "%s %s"
.given .family)))
", ")
.publisher
.created.date-parts)
.DOI)))))
(doi (cdr (assoc (completing-read "Choice: " candidates) candidates))))

(with-current-buffer
(url-retrieve-synchronously
(concat
"http://search.crossref.org/dois?q="
(url-hexify-string query)))
;; replace html entities
(save-excursion
(goto-char (point-min))
(while (re-search-forward "<i>\\|</i>" nil t)
(replace-match ""))
(goto-char (point-min))
(while (re-search-forward "&amp;" nil t)
(replace-match "&"))
(goto-char (point-min))
(while (re-search-forward "&quot;" nil t)
(replace-match "\\\"" nil t)))
(setq raw-json-string (buffer-substring url-http-end-of-headers (point-max)))
;; decode json string
(setq json-string (decode-coding-string (encode-coding-string raw-json-string 'utf-8) 'utf-8))
(setq json-data (json-read-from-string json-string)))
(with-current-buffer (find-file-noselect bibtex-file)
(doi-utils-add-bibtex-entry-from-doi
(replace-regexp-in-string
"^https?://\\(dx.\\)?doi.org/" "" doi)
bibtex-file)
(save-buffer))))

(let* ((name (format "Crossref hits for %s"
;; remove carriage returns. They can make completion confusing.
(replace-regexp-in-string "\n" " " query)))
(candidates (mapcar (lambda (x)
(cons
(concat
(cdr (assoc 'fullCitation x)))
(cdr (assoc 'doi x))))
json-data))
(doi (cdr (assoc (completing-read "Choice: " candidates) candidates))))
(with-current-buffer (find-file-noselect bibtex-file)
(doi-utils-add-bibtex-entry-from-doi
(replace-regexp-in-string
"^https?://\\(dx.\\)?doi.org/" "" doi)
bibtex-file)
(save-buffer)))))
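;; Usage sketch (assumption about the interactive spec, not stated in the diff):
;; M-x doi-utils-add-entry-from-crossref-query prompts for a query and a bibfile,
;; offers the crossref hits via completing-read, and adds the chosen DOI with
;; doi-utils-add-bibtex-entry-from-doi.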

(defalias 'crossref-add-bibtex-entry 'doi-utils-add-entry-from-crossref-query
"Alias function for convenience.")