Getting article metadata from MS Academic: some R code

As promised, I went back and did this myself instead of relying on a partner in crime (earlier referred to as an SME, but he outed himself). It's funny because I had his code, but he does things differently than I do, so I needed to work through it myself.

On my first mostly successful run, I ended up with about 44% of the rows missing metadata. I discovered fairly quickly that tm's removePunctuation was, of course (in retrospect), closing up intraword dashes instead of leaving a space. You can tell it to preserve those dashes, but you can't have it substitute a space. I first did some finding and replacing in Excel, which only got me down to 32% missing. Then I was like, duh, just gsub [[:punct:]] to a space and see if that's better. I hope I haven't used up my quota!
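
For the curious, here's roughly what that difference looks like (a made-up example string, not from my title list; tm's preserve_intra_word_dashes option keeps the dash, but nothing in removePunctuation will swap it for a space):

library(tm)
removePunctuation("state-of-the-art methods")          # "stateoftheart methods" - words get glued together
removePunctuation("state-of-the-art methods", preserve_intra_word_dashes = TRUE)  # keeps the dash as-is
gsub("[[:punct:]]", " ", "state-of-the-art methods")   # "state of the art methods" - dash becomes a space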

Here's the code. You'll need to sign up for your own key (the API docs link is in the comments below). Also note: not affiliated, not endorsing.

#microsoft academic to try to find affiliations for article titles

library("httr", lib.loc="~/R/win-library/3.3")
library("tm", lib.loc="~/R/win-library/3.3")
library("jsonlite", lib.loc="~/R/win-library/3.3")

setwd("~/DataScienceResearchInitiative")

#don't forget the following or you will regret it
options(stringsAsFactors = FALSE)

# api info https://dev.projectoxford.ai/docs/services/56332331778daf02acc0a50b/operations/565d753be597ed16ac3ffc03

# https://api.projectoxford.ai/academic/v1.0/evaluate[?expr][&model][&count][&offset][&orderby][&attributes]

#key:
msakey1<-"put yours here"

apiurl<-"https://api.projectoxford.ai/academic/v1.0/evaluate?expr="
searchexpr<-"Ti='example'"
apiattrib<-"Ti,Y,AA.AuN,AA.AfN,C.CN,J.JN,E"
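#for reference (my gloss, based on how the fields are used below): Ti=title, Y=year,
#AA.AuN=author name, AA.AfN=author affiliation, C.CN=conference name, J.JN=journal name,
#E=extended metadata blob (a JSON string holding VFN, V, FP, LP, DOI, D)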

#test on one to see how it works
testcite <- GET(apiurl,
                query = list(expr = searchexpr, count = 1, attributes = apiattrib),
                add_headers("Ocp-Apim-Subscription-Key" = msakey1))

#get the json out into usable format
#could look for errors first
testcite$status_code
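#optional: httr has helpers for exactly this check (a sketch I'm adding, not part of the original run)
warn_for_status(testcite)   # warns if the status code indicates an error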

#comes out raw so need to make into text
testciteContent <- rawToChar(testcite$content)

test<-fromJSON(testciteContent)
test$entities$AA
test$entities$AA[[1]]$AuN
#this collapses the author names into a single ;-separated string
paste(test$entities$AA[[1]]$AuN, collapse = ';')

test$entities$AA[[1]]$AfN
test$entities$J$JN
test$entities$Y
test$entities$Ti

# initiate a dataframe
# for each title, go out and search using that title
# could add in a warn_for_status(r) when status is not 200 (happy)
# if status != 200 go to the next one; if status == 200
# extract ti, y, authors (paste), affil (paste), jn, cn, and, out of entities$E: VFN, V, FP, LP, DOI, D
# write them to the data frame
#1904 is the length of my article title list

CitesOut<- data.frame(ti = rep(NA,1904),
                      y = rep(NA_integer_, 1904),
                      au = rep(NA,1904),
                      af = rep(NA,1904),
                      jn = rep(NA,1904),
                      cn = rep(NA,1904),
                      vfn = rep(NA,1904),
                      v = rep(NA,1904),
                      fp = rep(NA,1904),
                      lp = rep(NA,1904),
                      doi = rep(NA,1904),
                      abs = rep(NA,1904),
                      stringsAsFactors = FALSE)
  
getMScites<- function(citeNo){
  apiurl<-"https://api.projectoxford.ai/academic/v1.0/evaluate?expr="
  searchexpr<- paste0("Ti='",TitlesToFindf[citeNo],"'")
  apiattrib<-"Ti,Y,AA.AuN,AA.AfN,C.CN,J.JN,E"
  holding<-GET(apiurl,
               query = list(expr = searchexpr,count = 1, attributes = apiattrib), 
               add_headers("Ocp-Apim-Subscription-Key"= msakey1))
  print(paste("cite number", citeNo,"status is:", holding$status_code))
  print(holding$headers$`content-length`)
  holdingContent <- rawToChar(holding$content)
  holdC<-fromJSON(holdingContent)
  cciterow<-data.frame(
    ti=ifelse(is.null(holdC$entities$Ti),NA,holdC$entities$Ti),
    y=ifelse(is.null(holdC$entities$Y),NA,as.integer(holdC$entities$Y)),
    #guard the AA list itself too, otherwise [[1]] errors when no authors come back
    au=ifelse(is.null(holdC$entities$AA) || is.null(holdC$entities$AA[[1]]$AuN),NA,paste(holdC$entities$AA[[1]]$AuN, collapse = ';')),
    af=ifelse(is.null(holdC$entities$AA) || is.null(holdC$entities$AA[[1]]$AfN),NA,paste(holdC$entities$AA[[1]]$AfN, collapse = ';')),
    jn=ifelse(is.null(holdC$entities$J$JN),NA,holdC$entities$J$JN),
    cn=ifelse(is.null(holdC$entities$C$CN),NA,holdC$entities$C$CN))
  print(cciterow)
  if(is.null(holdC$entities$E)){
    eciterow<-data.frame(
      vfn=NA,
      v=NA,
      fp=NA,
      lp=NA,
      doi=NA,
      abs=NA)
  } else {
    holdE<-fromJSON(holdC$entities$E)
    eciterow<-data.frame(
      vfn=ifelse(is.null(holdE$VFN),NA,holdE$VFN),
      v=ifelse(is.null(holdE$V),NA,holdE$V),
      fp=ifelse(is.null(holdE$FP),NA,holdE$FP),
      lp=ifelse(is.null(holdE$LP),NA,holdE$LP),
      doi=ifelse(is.null(holdE$DOI),NA,holdE$DOI),
      abs=ifelse(is.null(holdE$D),NA,holdE$D))
  }
  print(eciterow)
  citerow<-cbind(cciterow,eciterow, stringsAsFactors=FALSE)
  print("this is citerow")
  print(citerow)
  return(citerow)
} 
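
#a sketch of the "skip when the request fails" idea from the comments above; this is my
#addition (not how the original run worked) and assumes an all-NA row is an acceptable
#placeholder for a bad response
getMScitesChecked <- function(citeNo){
  tryCatch(getMScites(citeNo), error = function(e){
    print(paste("cite number", citeNo, "failed:", conditionMessage(e)))
    data.frame(ti = NA, y = NA_integer_, au = NA, af = NA, jn = NA, cn = NA,
               vfn = NA, v = NA, fp = NA, lp = NA, doi = NA, abs = NA,
               stringsAsFactors = FALSE)
  })
}
#the loops below could call getMScitesChecked(i) in place of getMScites(i)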

#troubleshooting
apiurl<-"https://api.projectoxford.ai/academic/v1.0/evaluate?expr="
searchexpr<- paste0("Ti='",TitlesToFindf[4],"'")
apiattrib<-"Ti,Y,AA.AuN,AA.AfN,C.CN,J.JN,E"
troubleshoot<-GET(apiurl,
                  query = list(expr = searchexpr, count = 1, attributes = apiattrib),
                  add_headers("Ocp-Apim-Subscription-Key" = msakey1))

troubleshoot$status_code
troubleshoot$headers$`content-length`

troubleshootcontent<-rawToChar(troubleshoot$content)  
troubleC<-fromJSON(troubleshootcontent)
troubleE<-fromJSON(troubleC$entities$E)

#prepare title list
## IMPORTANT - all the titles have to be lower case and there can't be any punctuation
TitlesToFind <- read.delim("~/DataScienceResearchInitiative/TitlesToFind.csv", header=FALSE)

TitlesToFindl<-apply(TitlesToFind,1,tolower)

TitlesToFindf<- gsub("[[:punct:]]"," ",TitlesToFindl)

head(TitlesToFindf)
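
#optional extra step (my addition, a sketch): replacing punctuation with a space can
#leave doubled spaces; collapsing them keeps the query strings tidy (untested whether
#MS Academic cares either way)
TitlesToFindf <- gsub("\\s+", " ", trimws(TitlesToFindf))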

#use Sys.sleep so you don't get an error for too many requests too quickly
for (i in 21:1904){
  temp<-getMScites(i)
  CitesOut[i,]<-temp
  Sys.sleep(2)
}
write.csv(CitesOut,"MSdsCites.csv")

length(which(is.na(CitesOut$ti)))
length(which(is.na(CitesOut$abs)))

missCites<-which(is.na(CitesOut$ti))

for (i in 1:length(missCites)) {
  temp<-getMScites(missCites[i])
  CitesOut[missCites[i],]<-temp
  Sys.sleep(2)
}
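
#worth re-checking the miss rate and re-saving after the retry pass (a sketch; same
#filename as above)
length(which(is.na(CitesOut$ti)))
write.csv(CitesOut, "MSdsCites.csv")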

Edited to fix formatting. Also, the missing cites were writing to the wrong rows... sigh.
