update packages
@@ -118,6 +118,13 @@ Set `doi-utils-make-notes' to nil if you want no notes."
  :type 'string
  :group 'doi-utils)

(defcustom doi-utils-metadata-function 'doi-utils-get-json-metadata
  "Function for retrieving json metadata from `doi-utils-dx-doi-org-url'.
The default is `doi-utils-get-json-metadata', but it sometimes
fails with a proxy.  An alternative is
`doi-utils-get-json-metadata-curl', which requires the external
program curl.")

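;; Illustrative only (not part of this commit): how a user could select the
;; curl-based retriever added above, e.g. when the default fails behind a proxy.
(setq doi-utils-metadata-function 'doi-utils-get-json-metadata-curl)
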
;;* Getting pdf files from a DOI

@@ -208,39 +215,18 @@ must return a pdf-url, or nil.")
  "Stores url to pdf download from a callback function.")

;;** Wiley
;; Wiley have changed the url structure from
;; http://onlinelibrary.wiley.com/doi/10.1002/anie.201402680/abstract
;; http://onlinelibrary.wiley.com/doi/10.1002/anie.201402680/pdf

;; It appears that it is not enough to use the pdf url above. That takes you to
;; an html page. The actual link to the pdf is embedded in that page. This is
;; how ScienceDirect does things too.

;; This is where the link is hidden:

;; <iframe id="pdfDocument" src="http://onlinelibrary.wiley.com/store/10.1002/anie.201402680/asset/6397_ftp.pdf?v=1&t=hwut2142&s=d4bb3cd4ad20eb733836717f42346ffb34017831" width="100%" height="675px"></iframe>


(defun doi-utils-get-wiley-pdf-url (redirect-url)
  "Wiley (like ScienceDirect) hides the pdf url in html.
We get it out here by parsing the html.
Argument REDIRECT-URL URL you are redirected to."
  (setq *doi-utils-waiting* t)
  (url-retrieve
   redirect-url
   (lambda (status)
     (goto-char (point-min))
     (re-search-forward "<iframe id=\"pdfDocument\" src=\"\\([^\"]*\\)\"" nil t)
     (setq *doi-utils-pdf-url* (match-string 1)
           *doi-utils-waiting* nil)))
  (while *doi-utils-waiting* (sleep-for 0.1))
  *doi-utils-pdf-url*)
;; to
;; http://onlinelibrary.wiley.com/doi/abs/10.1002/anie.201402680
;; http://onlinelibrary.wiley.com/doi/pdf/10.1002/anie.201402680
;; Hence fewer steps are now required.

(defun wiley-pdf-url (*doi-utils-redirect*)
  "Get url to the pdf from *DOI-UTILS-REDIRECT*."
  (when (string-match "^http\\(s?\\)://onlinelibrary.wiley.com" *doi-utils-redirect*)
    (doi-utils-get-wiley-pdf-url
     (replace-regexp-in-string "/abstract" "/pdf" *doi-utils-redirect*))
    *doi-utils-pdf-url*))
    (replace-regexp-in-string "doi/abs" "doi/pdf" *doi-utils-redirect*)))

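;; Illustrative only (not part of this commit): the simplified Wiley handler
;; just rewrites the abstract url into the pdf url; the example url below is
;; the one from the comments above.
(replace-regexp-in-string "doi/abs" "doi/pdf"
                          "http://onlinelibrary.wiley.com/doi/abs/10.1002/anie.201402680")
;; => "http://onlinelibrary.wiley.com/doi/pdf/10.1002/anie.201402680"
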
(defun agu-pdf-url (*doi-utils-redirect*)
@@ -551,15 +537,11 @@ REDIRECT-URL is where the pdf url will be in."
            (match-string 1))))))))

;; ACM Digital Library
;; http(s)://dl.acm.org/citation.cfm?doid=1368088.1368132
;; <a name="FullTextPDF" title="FullText PDF" href="ft_gateway.cfm?id=1368132&ftid=518423&dwn=1&CFID=766519780&CFTOKEN=49739320" target="_blank">
;; https://dl.acm.org/doi/10.1145/1368088.1368132
(defun acm-pdf-url (*doi-utils-redirect*)
  "Get a url to the pdf from *DOI-UTILS-REDIRECT* for ACM urls."
  (when (string-match "^https?://dl.acm.org" *doi-utils-redirect*)
    (with-current-buffer (url-retrieve-synchronously *doi-utils-redirect*)
      (goto-char (point-min))
      (when (re-search-forward "<a name=\"FullTextPDF\".*href=\"\\([[:ascii:]]*?\\)\"" nil t)
        (concat "http://dl.acm.org/" (match-string 1))))))
    (replace-regexp-in-string "doi" "doi/pdf" *doi-utils-redirect* )))

;;** Optical Society of America (OSA)
(defun osa-pdf-url (*doi-utils-redirect*)
@@ -603,6 +585,12 @@ It would be better to parse this, but here I just use a regexp.
  (when (string-match "^http\\(s?\\)://epubs.siam.org" *doi-utils-redirect*)
    (replace-regexp-in-string "/doi/" "/doi/pdf/" *doi-utils-redirect* )))

;; PLOS journals
;; https://plos.org/
(defun plos-pdf-url (*doi-utils-redirect*)
  "Get url to the pdf from *DOI-UTILS-REDIRECT*."
  (when (string-match "^http\\(s*\\)://journals.plos.org" *doi-utils-redirect*)
    (concat (replace-regexp-in-string (regexp-quote "/article?id=") "/article/file?id=" *doi-utils-redirect*) "&type=printable")))


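;; Illustrative only (not part of this commit): what the PLOS transform
;; produces.  The article id below is a made-up placeholder.
(concat (replace-regexp-in-string (regexp-quote "/article?id=")
                                  "/article/file?id="
                                  "https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0000001")
        "&type=printable")
;; => "https://journals.plos.org/plosone/article/file?id=10.1371/journal.pone.0000001&type=printable"
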
;;** Add all functions
@@ -639,6 +627,7 @@ It would be better to parse this, but here I just use a regexp.
   'asme-biomechanical-pdf-url
   'siam-pdf-url
   'agu-pdf-url
   'plos-pdf-url
   'generic-full-pdf-url))

;;** Get the pdf url for a doi
@@ -750,17 +739,44 @@ Opening %s" json-data url))
     (t
      (json-read-from-string json-data))))))


(defun doi-utils-get-json-metadata-curl (doi)
  "Try to get json metadata for DOI. Open the DOI in a browser if we do not get it."
  (let ((json-object-type 'plist)
        (json-data)
        (url (concat doi-utils-dx-doi-org-url doi)))
    (with-temp-buffer
      (call-process "curl" nil t nil
                    "--location"
                    "--silent"
                    "--header"
                    "Accept: application/citeproc+json"
                    url)
      (setq json-data (buffer-string))
      (cond
       ((or (string-match "<title>Error: DOI Not Found</title>" json-data)
            (string-match "Resource not found" json-data)
            (string-match "Status *406" json-data)
            (string-match "400 Bad Request" json-data))
        (browse-url url)
        (error "Something went wrong. We got this response:
%s
Opening %s" json-data url))
       ;; everything seems ok with the data
       (t
        (json-read-from-string json-data))))))

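;; For reference, the call-process form above is roughly equivalent to this
;; shell command (illustrative only; it assumes `doi-utils-dx-doi-org-url'
;; points at https://doi.org/ and reuses the doi from the Wiley comments):
;;   curl --location --silent \
;;        --header "Accept: application/citeproc+json" \
;;        https://doi.org/10.1002/anie.201402680
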
;; We can use that data to construct a bibtex entry. We do that by defining a
;; template, and filling it in. I wrote this template expansion code which
;; makes it easy to substitute values like %{} in emacs lisp.


(defun doi-utils-expand-template (s)
  "Expand a string template S containing %{} with the eval of its contents."
  (replace-regexp-in-string "%{\\([^}]+\\)}"
                            (lambda (arg)
                              (let ((sexp (substring arg 2 -1)))
                                (format "%s" (eval (read sexp)))))
                            s))


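;; A minimal sketch of how the template expansion behaves (illustrative only,
;; not part of this commit; the template string is made up):
(doi-utils-expand-template "volume = {%{(+ 1 2)}}")
;; => "volume = {3}"
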
@@ -877,7 +893,7 @@ MATCHING-TYPES."

(defun doi-utils-doi-to-bibtex-string (doi)
  "Return a bibtex entry as a string for the DOI. Not all types are supported yet."
  (let* ((results (doi-utils-get-json-metadata doi))
  (let* ((results (funcall doi-utils-metadata-function doi))
         (type (plist-get results :type)))
    ;; (format "%s" results) ; json-data
    (or (-some (lambda (g) (funcall g type results)) doi-utils-bibtex-type-generators)
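;; Illustrative usage (not part of this commit; the doi is the one from the
;; Wiley comments above).  With the new defcustom, the same call works whether
;; the url.el or curl retriever is selected:
;; (doi-utils-doi-to-bibtex-string "10.1002/anie.201402680")
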
@@ -933,59 +949,7 @@ Argument BIBFILE the bibliography to use."
   (list (read-string
          "DOI: "
          ;; now set initial input
          (cond
           ;; If region is active and it starts like a doi we want it.
           ((and (region-active-p)
                 (s-match "^10" (buffer-substring
                                 (region-beginning)
                                 (region-end))))
            (buffer-substring (region-beginning) (region-end)))
           ((and (region-active-p)
                 (s-match "^http://dx\\.doi\\.org/" (buffer-substring
                                                     (region-beginning)
                                                     (region-end))))
            (replace-regexp-in-string "^http://dx\\.doi\\.org/" ""
                                      (buffer-substring (region-beginning) (region-end))))
           ((and (region-active-p)
                 (s-match "^https://dx\\.doi\\.org/" (buffer-substring
                                                      (region-beginning)
                                                      (region-end))))
            (replace-regexp-in-string "^https://dx\\.doi\\.org/" ""
                                      (buffer-substring (region-beginning) (region-end))))
           ((and (region-active-p)
                 (s-match (regexp-quote doi-utils-dx-doi-org-url) (buffer-substring
                                                                   (region-beginning)
                                                                   (region-end))))
            (replace-regexp-in-string (regexp-quote doi-utils-dx-doi-org-url) ""
                                      (buffer-substring (region-beginning) (region-end)))
            (buffer-substring (region-beginning) (region-end)))
           ;; if the first entry in the kill-ring looks
           ;; like a DOI, let's use it.
           ((and
             ;; make sure the kill-ring has something in it
             (stringp (car kill-ring))
             (s-match "^10" (car kill-ring)))
            (car kill-ring))
           ;; maybe kill-ring matches http://dx.doi or something
           ((and
             ;; make sure the kill-ring has something in it
             (stringp (car kill-ring))
             (s-match "^http://dx\\.doi\\.org/" (car kill-ring)))
            (replace-regexp-in-string "^http://dx\\.doi\\.org/" "" (car kill-ring)))
           ((and
             ;; make sure the kill-ring has something in it
             (stringp (car kill-ring))
             (s-match "^https://dx\\.doi\\.org/" (car kill-ring)))
            (replace-regexp-in-string "^https://dx\\.doi\\.org/" "" (car kill-ring)))
           ((and
             ;; make sure the kill-ring has something in it
             (stringp (car kill-ring))
             (s-match (regexp-quote doi-utils-dx-doi-org-url) (car kill-ring)))
            (replace-regexp-in-string (regexp-quote doi-utils-dx-doi-org-url) "" (car kill-ring)))
           ;; otherwise, we have no initial input. You
           ;; will have to type it in.
           (t
            nil)))))
          (doi-utils-maybe-doi-from-region-or-current-kill))))

  (unless bibfile
    (setq bibfile (completing-read "Bibfile: " (org-ref-possible-bibfiles))))
@@ -1009,6 +973,53 @@ Argument BIBFILE the bibliography to use."
(defalias 'doi-add-bibtex-entry 'doi-utils-add-bibtex-entry-from-doi
  "Alias function for convenience.")


(defun doi-utils-maybe-doi-from-region-or-current-kill ()
  "Try to get a DOI from the active region or current kill."
  (let* ((the-active-region (if (region-active-p) ;; nil if no active region
                                (buffer-substring (region-beginning) (region-end))
                              nil))
         (the-current-kill (ignore-errors (current-kill 0 t))) ;; nil if empty kill ring
         ;; DOI urls
         ;; Ex: https://doi.org/10.1109/MALWARE.2014.6999410
         ;; Ex: https://dx.doi.org/10.1007/978-3-319-60876-1_10
         (doi-url-prefix-regexp "^https?://\\(dx\\.\\)?doi\\.org/")
         ;; https://www.crossref.org/blog/dois-and-matching-regular-expressions/
         (doi-regexp "10\\.[0-9]\\{4,9\\}/[-._;()/:A-Z0-9]+$"))
    (cond
     ;; Check if a DOI can be found in the active region
     ;; DOI raw
     ;; Ex: 10.1109/MALWARE.2014.6999410
     ((and (stringp the-active-region)
           (s-match (concat "^" doi-regexp) the-active-region))
      the-active-region)
     ;; DOI url
     ;; Ex: https://doi.org/10.1109/MALWARE.2014.6999410
     ((and (stringp the-active-region)
           (s-match (concat doi-url-prefix-regexp doi-regexp) the-active-region))
      (replace-regexp-in-string doi-url-prefix-regexp "" the-active-region))
     ;; DOI url as customized
     ((and (stringp the-active-region)
           (s-match (regexp-quote doi-utils-dx-doi-org-url) the-active-region))
      (replace-regexp-in-string (regexp-quote doi-utils-dx-doi-org-url) "" the-active-region))
     ;; Check if DOI can be found in the current kill
     ;; DOI raw
     ;; Ex: 10.1109/MALWARE.2014.6999410
     ((and (stringp the-current-kill)
           (s-match (concat "^" doi-regexp) the-current-kill))
      the-current-kill)
     ;; DOI url
     ;; Ex: https://doi.org/10.1109/MALWARE.2014.6999410
     ((and (stringp the-current-kill)
           (s-match (concat doi-url-prefix-regexp doi-regexp) the-current-kill))
      (replace-regexp-in-string doi-url-prefix-regexp "" the-current-kill))
     ;; DOI url as customized
     ((and (stringp the-current-kill)
           (s-match (regexp-quote doi-utils-dx-doi-org-url) the-current-kill))
      (replace-regexp-in-string (regexp-quote doi-utils-dx-doi-org-url) "" the-current-kill))
     ;; otherwise, return nil
     (t
      nil))))

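;; Illustrative only (not part of this commit): the crossref-style regexp used
;; above matches a bare DOI like the Ex: values in the comments, e.g.
(s-match "10\\.[0-9]\\{4,9\\}/[-._;()/:A-Z0-9]+$"
         "10.1109/MALWARE.2014.6999410")
;; => ("10.1109/MALWARE.2014.6999410")
;; and the url-prefix regexp strips a leading https://doi.org/ or
;; https://dx.doi.org/ before the same check.
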
;;;###autoload
(defun doi-utils-doi-to-org-bibtex (doi)
  "Convert a DOI to an ‘org-bibtex’ form and insert it at point."
@@ -1074,7 +1085,7 @@ Every field will be updated, so previous change will be lost."
            "https?://\\(dx.\\)?doi.org/" ""
            (bibtex-autokey-get-field "doi"))
           (read-string "DOI: "))))
  (let* ((results (doi-utils-get-json-metadata doi))
  (let* ((results (funcall doi-utils-metadata-function doi))
         (type (plist-get results :type))
         (author (mapconcat
                  (lambda (x)
@@ -1131,7 +1142,7 @@ Every field will be updated, so previous change will be lost."
Data is retrieved from the doi in the entry."
  (interactive)
  (let* ((doi (bibtex-autokey-get-field "doi"))
         (results (doi-utils-get-json-metadata doi))
         (results (funcall doi-utils-metadata-function doi))
         (field (car (bibtex-find-text-internal nil nil ","))))
    (cond
     ((string= field "volume")
@@ -1194,6 +1205,15 @@ May be empty if none are found."
   "&svc_val_fmt=info%3Aofi%2Ffmt%3Akev%3Amtx%3Asch_svc&svc.related=yes")))


;;* DOI functions for ADS

;;;###autoload
(defun doi-utils-ads (doi)
  "Open ADS entry for DOI."
  (interactive "sDOI: ")
  (browse-url
   (concat
    "https://ui.adsabs.harvard.edu/abs/" "%22" doi "%22")))


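;; Illustrative only (not part of this commit): the url that doi-utils-ads
;; constructs wraps the doi in url-encoded quotes, e.g. for the doi used in
;; the Wiley comments earlier:
(concat "https://ui.adsabs.harvard.edu/abs/" "%22" "10.1002/anie.201402680" "%22")
;; => "https://ui.adsabs.harvard.edu/abs/%2210.1002/anie.201402680%22"
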
;;* A new doi link for org-mode
@@ -1263,6 +1283,7 @@ must take one argument, the doi.")
   ("w" "os" doi-utils-wos)
   ("c" "iting articles" doi-utils-wos-citing)
   ("r" "elated articles" doi-utils-wos-related)
   ("a" "ds" doi-utils-ads)
   ("s" "Google Scholar" doi-utils-google-scholar)
   ("f" "CrossRef" doi-utils-crossref)
   ("p" "ubmed" doi-utils-pubmed)
