added getJobQueues function in Jobs module & some other fixes #2

Open · wants to merge 3 commits into base: develop

2 changes: 1 addition & 1 deletion NAMESPACE
@@ -6,5 +6,5 @@ export(SciDrive.createContainer,SciDrive.upload,SciDrive.publicUrl,SciDrive.down
export(Config.isSciServerComputeEnvironment,Config.CasJobsRESTUri,Config.AuthenticationURL,Config.SciDriveHost,Config.SkyQueryUrl,Config.DataRelease,Config.SkyServerWSurl,Config.Version,Config.KeystoneTokenFilePath,Config.ComputeJobDirectoryFile,Config.RacmApiURL)
export(SkyQuery.getJobStatus,SkyQuery.cancelJob,SkyQuery.listQueues,SkyQuery.getQueueInfo,SkyQuery.submitJob,SkyQuery.waitForJob,SkyQuery.listJobs,SkyQuery.listAllDatasets,SkyQuery.getDatasetInfo,SkyQuery.listDatasetTables,SkyQuery.getTableInfo,SkyQuery.listTableColumns,SkyQuery.getTable,SkyQuery.dropTable,SkyQuery.uploadTable)
export(Files.getFileServices,Files.getFileServicesNames,Files.getFileServiceFromName,Files.__getFileServiceAPIUrl,Files.getRootVolumesInfo,Files.getDataVolumesInfo,Files.getUserVolumesInfo,Files.splitPath,Files.createUserVolume,Files.deleteUserVolume,Files.createDir,Files.delete,Files.upload,Files.download,Files.dirList,Files.move,Files.shareUserVolume)
export(Jobs.getDockerComputeDomains,Jobs.getDockerComputeDomainsNames,Jobs.getDockerComputeDomainFromName,Jobs.getRDBComputeDomains,Jobs.getRDBComputeDomainsNames,Jobs.getRDBComputeDomainFromName,Jobs.getJobsList,Jobs.getDockerJobsListQuick,Jobs.getJobDescription,Jobs.getJobStatus,Jobs.submitNotebookJob,Jobs.submitShellCommandJob,Jobs.submitRDBQueryJob,Jobs.cancelJob,Jobs.waitForJob)
export(Jobs.getDockerComputeDomains,Jobs.getDockerComputeDomainsNames,Jobs.getDockerComputeDomainFromName,Jobs.getRDBComputeDomains,Jobs.getRDBComputeDomainsNames,Jobs.getRDBComputeDomainFromName,Jobs.getJobsList,Jobs.getDockerJobsListQuick,Jobs.getJobDescription,Jobs.getJobStatus,Jobs.submitNotebookJob,Jobs.submitShellCommandJob,Jobs.submitRDBQueryJob,Jobs.cancelJob,Jobs.waitForJob,Jobs.getJobQueues)
import(httr,jsonlite,utils,data.table)
61 changes: 47 additions & 14 deletions R/Jobs.r
@@ -162,7 +162,7 @@ Jobs.getJobsList <- function(top=10, open=NULL, start=NULL, end=NULL, type="all"
url = paste(Config.RacmApiURL,"/jobm/rest/dockerjobs?",sep="")
}

url = paste(url,topString,startString,endString,"TaskName=",taskName,sep="")
url = paste(url,topString,startString,endString,openString,"TaskName=",taskName,sep="")


r = GET(url,add_headers('X-Auth-Token'=token),accept("application/json"))
@@ -198,7 +198,7 @@ Jobs.getDockerJobsListQuick <- function(top=10, open=NULL, start=NULL, end=NULL,


url = paste(Config.RacmApiURL,"/jobm/rest/dockerjobs/quick?",sep="")
url = paste(url,topString,startString,endString,"TaskName=",taskName,sep="")
url = paste(url,topString,startString,endString,labRegString,openString,"TaskName=",taskName,sep="")

r = GET(url,add_headers('X-Auth-Token'=token),accept("application/json"))

@@ -212,6 +212,39 @@ Jobs.getDockerJobsListQuick <- function(top=10, open=NULL, start=NULL, end=NULL,
}
}

Jobs.getJobQueues <- function(format="dataframe"){

token = Authentication.getToken()
if(!is.null(token) && token != "")
{

if(Config.isSciServerComputeEnvironment()){
taskName = "Compute.SciScript-R.Jobs.getJobQueues"
}else{
taskName = "SciScript-R.Jobs.getJobQueues"
}

url = paste(Config.RacmApiURL,"/jobm/rest/jobs/queues?","TaskName=",taskName,sep="")

r = GET(url,add_headers('X-Auth-Token'=token),accept("application/json"))

if(r$status_code != 200) {
stop(paste("Error when getting job queues from JOBM API.\nHttp Response from JOBM API returned status code ", r$status_code, ":\n", content(r, as="text", encoding="UTF-8")))
} else {

if(format == "dataframe"){
r = fromJSON(content(r, "text", encoding="UTF-8"))
df <- data.frame(r$rows)
colnames(df) <- r$columns
return(df)
}else{
return(content(r))
}
}
}else{
stop(paste("User token is not defined. First log into SciServer."))
}
}

Jobs.getJobDescription <- function(jobId){

@@ -316,7 +349,7 @@ Jobs.submitNotebookJob <- function(notebookPath, dockerComputeDomain=NULL, docke
if(uVol$name == vol$name && uVol$rootVolumeName == vol$rootVolumeName && uVol$owner == vol$owner){

found = TRUE
if(uVol$needsWriteAccess){
if(!is.null(uVol$needsWriteAccess)){

if(uVol$needsWriteAccess == TRUE && ('write' %in% vol$allowedActions) ){
uVols[[length(uVols)+1]] <- list(userVolumeId= vol$id, needsWriteAccess= TRUE)
@@ -346,7 +379,7 @@ Jobs.submitNotebookJob <- function(notebookPath, dockerComputeDomain=NULL, docke
if( is.null(dataVolumes)){
for( i in 1:length(dockerComputeDomain$volumes)){
vol = dockerComputeDomain$volumes[[i]]
if("write" %in% vol$allowedActions){
if(vol$writable){
dataVols[[length(dataVols)+1]] <- list(id=vol$id, name= vol$name, writable=TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id=vol$id, name= vol$name, writable=FALSE)
@@ -360,17 +393,17 @@ Jobs.submitNotebookJob <- function(notebookPath, dockerComputeDomain=NULL, docke
vol = dockerComputeDomain$volumes[[j]]
if( vol$name == dVol$name ){
found = TRUE;
if(dVol$needsWriteAccess){
if(!is.null(dVol$needsWriteAccess)){

if(dVol$needsWriteAccess == TRUE && ('write' %in% vol$allowedActions) ){
if(dVol$needsWriteAccess == TRUE){
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= FALSE)
}

}else{

if('write' %in% vol$allowedActions ){
if(vol$writable){
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= FALSE)
@@ -465,7 +498,7 @@ Jobs.submitShellCommandJob <- function(shellCommand, dockerComputeDomain = NULL,
if(uVol$name == vol$name && uVol$rootVolumeName == vol$rootVolumeName && uVol$owner == vol$owner){

found = TRUE
if(uVol$needsWriteAccess){
if(!is.null(uVol$needsWriteAccess)){

if(uVol$needsWriteAccess == TRUE && ('write' %in% vol$allowedActions) ){
uVols[[length(uVols)+1]] <- list(userVolumeId= vol$id, needsWriteAccess= TRUE)
@@ -495,7 +528,7 @@ Jobs.submitShellCommandJob <- function(shellCommand, dockerComputeDomain = NULL,
if( is.null(dataVolumes)){
for( i in 1:length(dockerComputeDomain$volumes)){
vol = dockerComputeDomain$volumes[[i]]
if("write" %in% vol$allowedActions){
if(vol$writable){
dataVols[[length(dataVols)+1]] <- list(id=vol$id, name= vol$name, writable=TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id=vol$id, name= vol$name, writable=FALSE)
@@ -509,24 +542,24 @@ Jobs.submitShellCommandJob <- function(shellCommand, dockerComputeDomain = NULL,
vol = dockerComputeDomain$volumes[[j]]
if( vol$name == dVol$name ){
found = TRUE;
if(dVol$needsWriteAccess){
if(!is.null(dVol$needsWriteAccess)){

if(dVol$needsWriteAccess == TRUE && ('write' %in% vol$allowedActions) ){
if(dVol$needsWriteAccess == TRUE){
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= FALSE)
}

}else{

if('write' %in% vol$allowedActions ){
if(vol$writable){
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= TRUE)
}else{
dataVols[[length(dataVols)+1]] <- list(id= vol$id, name=vol$name, writable= FALSE)
}

}

}
}

29 changes: 29 additions & 0 deletions man/Jobs.getJobQueues.Rd
@@ -0,0 +1,29 @@
\name{Jobs.getJobQueues}
\alias{Jobs.getJobQueues}
\title{
Get Job Queues
}
\description{
Gets information about the queues of jobs submitted to all compute domains, including the rank of jobs that users have already submitted to each queue for execution, as well as the rank that a new job would get if it were submitted to a queue.
}
\usage{Jobs.getJobQueues(format="dataframe")}
\arguments{
\item{format}{Format (string) of the returned object. If equal to "dataframe" (the default), then the result is a dataframe; otherwise it is a list.}
}
\value{a dataframe or list containing information about the job queues.}
\author{
Manuchehr Taghizadeh-Popp \cr
Maintainer: Manuchehr Taghizadeh-Popp <mtaghiza@jhu.edu>
}
\seealso{
\code{\link[SciServer]{Jobs.getJobsList}},\code{\link[SciServer]{Jobs.submitNotebookJob}},\code{\link[SciServer]{Jobs.submitShellCommandJob}},\code{\link[SciServer]{Jobs.getJobStatus}},\code{\link[SciServer]{Jobs.getDockerComputeDomains}},\code{\link[SciServer]{Jobs.cancelJob}}
}
\references{
\href{http://www.sciserver.org}{http://www.sciserver.org}\cr
\href{http://apps.sciserver.org/}{http://apps.sciserver.org}\cr
\href{http://www.github.com/sciserver/SciScript-R}{http://www.github.com/sciserver/SciScript-R}
}
\examples{
jobs = Jobs.getJobQueues()
}
\keyword{SciServer}
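
A minimal usage sketch of the new function (not part of the diff), tied to the documentation above: it assumes the user is already authenticated so that Authentication.getToken() returns a valid token, and the comments describe expected behavior rather than output from a real run.

# Minimal sketch, assuming a valid SciServer login token is already set.
library(SciServer)

queues <- Jobs.getJobQueues()                   # default format="dataframe": data.frame built from r$rows / r$columns
head(queues)                                    # inspect queue information (columns are illustrative, not verified)

queuesRaw <- Jobs.getJobQueues(format="list")   # any other format value returns the parsed JSON content as a list
str(queuesRaw, max.level=2)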
2 changes: 1 addition & 1 deletion man/Jobs.submitNotebookJob.Rd
@@ -13,7 +13,7 @@ Submits a Jupyter Notebook for execution (as an asynchronous job) inside a Docke
\item{dockerImageName}{name (string) of the Docker image for executing the notebook. E.g., dockerImageName="Python (astro)". An array of available Docker images is defined as the 'images' property in the dockerComputeDomain object.}
\item{userVolumes}{a list with the names of user volumes (with optional write permissions) that will be mounted to the docker Image. E.g.:
userVolumes = list( list(name="JobsTestVolume", rootVolumeName="Storage", owner="myUserName", needsWriteAccess=TRUE), list(name="scratch", rootVolumeName="Temporary", owner="myUserName", needsWriteAccess=TRUE) ). A list of available user volumes can be found as the 'userVolumes' property in the dockerComputeDomain object. If userVolumes=Null, then all available user volumes are mounted, with 'needsWriteAccess' = TRUE if the user has Write permissions on the volume.}
\item{dataVolumes}{a list with the names of data volumes that will be mounted to the docker Image. E.g.: dataVolumes=list(list(name='SDSS DAS')). A list of available data volumes can be found as the 'volumes' property in the dockerComputeDomain list. If dataVolumes=NULL, then all available data volumes are mounted.}
\item{dataVolumes}{a list with the names of data volumes (with optional write permissions) that will be mounted to the docker Image. E.g.: dataVolumes=list(list(name='SDSS DAS', needsWriteAccess=TRUE)). A list of available data volumes can be found as the 'volumes' property in the dockerComputeDomain list. If dataVolumes=NULL, then all available data volumes are mounted.}
\item{resultsFolderPath}{full path to results folder (string) where the original notebook is copied to and executed. E.g.: /home/idies/workspace/rootVolume/username/userVolume/jobsFolder. If not set, then a default folder will be set automatically.}
\item{parameters}{string containing parameters that the notebook might need during its execution. This string is written in the 'parameters.txt' file in the same directory level where the notebook is being executed.}
\item{jobAlias}{alias (string) of job, defined by the user.}
2 changes: 1 addition & 1 deletion man/Jobs.submitShellCommandJob.Rd
@@ -13,7 +13,7 @@ Submits a shell command for execution (as an asynchronous job) inside a Docker c
\item{dockerImageName}{name (string) of the Docker image for executing the notebook. E.g., dockerImageName="Python (astro)". An array of available Docker images is defined as the 'images' property in the dockerComputeDomain object.}
\item{userVolumes}{a list with the names of user volumes (with optional write permissions) that will be mounted to the docker Image. E.g.:
userVolumes = list( list(name="JobsTestVolume", rootVolumeName="Storage", owner="myUserName", needsWriteAccess=TRUE), list(name="scratch", rootVolumeName="Temporary", owner="myUserName", needsWriteAccess=TRUE) ). A list of available user volumes can be found as the 'userVolumes' property in the dockerComputeDomain object. If userVolumes=Null, then all available user volumes are mounted, with 'needsWriteAccess' = TRUE if the user has Write permissions on the volume.}
\item{dataVolumes}{a list with the names of data volumes that will be mounted to the docker Image. E.g.: dataVolumes=list(list(name='SDSS DAS')). A list of available data volumes can be found as the 'volumes' property in the dockerComputeDomain list. If dataVolumes=NULL, then all available data volumes are mounted.}
\item{dataVolumes}{a list with the names of data volumes (with optional write permissions) that will be mounted to the docker Image. E.g.: dataVolumes=list(list(name='SDSS DAS', needsWriteAccess=FALSE)). A list of available data volumes can be found as the 'volumes' property in the dockerComputeDomain list. If dataVolumes=NULL, then all available data volumes are mounted.}
\item{resultsFolderPath}{full path to results folder (string) where the original notebook is copied to and executed. E.g.: /home/idies/workspace/rootVolume/username/userVolume/jobsFolder. If not set, then a default folder will be set automatically.}
\item{jobAlias}{alias (string) of job, defined by the user.}
}
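
For completeness, a hedged sketch of how the updated dataVolumes argument (now accepting an optional needsWriteAccess flag) might be passed to Jobs.submitNotebookJob. The volume, image, and path values below are taken from the documentation examples or are placeholders, not a tested call, and the return value is assumed to be a job id. The same dataVolumes shape applies to Jobs.submitShellCommandJob.

# Illustrative sketch only: domain, image, volume, and path names are placeholders
# or documentation examples, not real, tested resources.
library(SciServer)

domains <- Jobs.getDockerComputeDomains()
domain  <- domains[[1]]                               # pick the first available Docker compute domain

jobId <- Jobs.submitNotebookJob(
  notebookPath        = "/home/idies/workspace/rootVolume/username/userVolume/analysis.ipynb",  # placeholder path
  dockerComputeDomain = domain,
  dockerImageName     = "Python (astro)",
  userVolumes = list(list(name="JobsTestVolume", rootVolumeName="Storage",
                          owner="myUserName", needsWriteAccess=TRUE)),
  dataVolumes = list(list(name="SDSS DAS", needsWriteAccess=FALSE)),  # new optional write-access flag
  jobAlias    = "notebook-with-read-only-data-volume"
)

Jobs.waitForJob(jobId)   # assumed here to block until the submitted job finishes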