Story/cite 163 - We need a bulk upload for files. #295

Open
wants to merge 24 commits into base: develop

24 commits
4312013
[CITE-163] Created collection.html page to import collection
PradnyaC11 Aug 22, 2024
d426a75
[CITE-163] Added POST for collection upload in ImportCollectionContro…
PradnyaC11 Aug 23, 2024
fe9e22d
[CITE-163] Added response to uploadCollection method
PradnyaC11 Aug 27, 2024
57563b4
[CITE-163] Fixing errors in upload collection code
PradnyaC11 Sep 3, 2024
505c2f2
[CITE-163] Updated the POST URL for upload collection
PradnyaC11 Sep 4, 2024
a108d4a
[CITE-163] Removed IUploadJobCollection class
PradnyaC11 Oct 4, 2024
c5c9cfc
[CITE-163] Added select for collections on import collection page
PradnyaC11 Nov 12, 2024
2134b97
[CITE-163] Added method to get collections of selected group
PradnyaC11 Nov 13, 2024
da60307
[CITE-163] Updated Upload job to include collectionId
PradnyaC11 Nov 14, 2024
57314c4
[CITE-163] Resolved importing to collection issue
PradnyaC11 Nov 20, 2024
ccb03f8
[CITE-163] Added option to select new collection
PradnyaC11 Nov 22, 2024
5a668b0
[CITE-163] Added methods to create new collection
PradnyaC11 Nov 25, 2024
75f108a
[CITE-163] Resolved issues with create new citation
PradnyaC11 Nov 26, 2024
35a4143
Merge branch 'develop' into story/CITE-163
PradnyaC11 Jan 21, 2025
8bac348
[CITE-163] Updated get group item API call
PradnyaC11 Jan 23, 2025
70cd62b
[CITE-163] Updated ItemTextController to added headers
PradnyaC11 Jan 24, 2025
13d0810
[CITE-163] Updated JobInfoController with giles token
PradnyaC11 Jan 30, 2025
9c4e0e9
[CITE-163] Updated get item API to use headers
PradnyaC11 Feb 6, 2025
073a5a0
[CITE-163] Updated versions in pom.xml
PradnyaC11 Feb 25, 2025
34721c4
[CITE-163] Cleaned-up code
PradnyaC11 Feb 27, 2025
9754d1b
[CITE-163] Removed unwanted changes
PradnyaC11 Feb 27, 2025
3d72e60
[CITE-163] Resolved code factor issue
PradnyaC11 Feb 28, 2025
d19ed0a
Merge branch 'develop' into story/CITE-163
PradnyaC11 Feb 28, 2025
1eba17e
Merge branch 'develop' into story/CITE-163
PradnyaC11 Mar 4, 2025
2 changes: 1 addition & 1 deletion citesphere/pom.xml
@@ -24,7 +24,7 @@
<javers.version>6.2.3</javers.version>
<spring.kafka.version>2.2.6.RELEASE</spring.kafka.version>
<spring-social-zotero.version>0.13</spring-social-zotero.version>
- <citesphere.messages.version>0.4</citesphere.messages.version>
+ <citesphere.messages.version>0.6</citesphere.messages.version>
<citesphere.model.version>1.24</citesphere.model.version>

<admin.password>$2a$04$oQo44vqcDIFRoYKiAXoNheurzkwX9dcNmowvTX/hsWuBMwijqn44i</admin.password>
JobInfoController.java
@@ -19,8 +19,10 @@
import edu.asu.diging.citesphere.core.model.jobs.IUploadJob;
import edu.asu.diging.citesphere.core.service.jobs.IUploadJobManager;
import edu.asu.diging.citesphere.core.service.jwt.IJobApiTokenContents;
import edu.asu.diging.citesphere.core.service.oauth.InternalTokenManager;
import edu.asu.diging.citesphere.core.user.IUserManager;
import edu.asu.diging.citesphere.core.zotero.impl.ZoteroTokenManager;
import edu.asu.diging.citesphere.user.IUser;

@Controller
public class JobInfoController extends BaseJobInfoController {
@@ -37,6 +39,8 @@ public class JobInfoController extends BaseJobInfoController {
@Autowired
private IExportTaskManager exportTaskManager;

@Autowired
private InternalTokenManager internalTokenManager;

@RequestMapping(value="/job/info")
public ResponseEntity<String> getProfile(@RequestHeader HttpHeaders headers) {
@@ -62,6 +66,9 @@ public ResponseEntity<String> getProfile(@RequestHeader HttpHeaders headers) {
// FIXME: ugly, needs better solution
if (job instanceof IUploadJob) {
node.put("groupId", ((IUploadJob)job).getCitationGroup());
node.put("collectionId", ((IUploadJob)job).getCitationCollection());
IUser user = userManager.findByUsername(job.getUsername());
node.put("giles", internalTokenManager.getAccessToken(user).getValue());
}
if (job instanceof IExportJob) {
IExportTask exportTask = exportTaskManager.get(((IExportJob)job).getTaskId());
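For reviewers, the change above means that for an upload job the /job/info response now carries the target collection and an internal access token ("giles") for the job's user, in addition to the group. The sketch below only shows how a consumer might read those three fields; the consumer class, the literal values, and the Jackson usage are illustrative and not part of this PR.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical consumer of the /job/info response; only the three field names come from the change above.
public class JobInfoResponseSketch {
    public static void main(String[] args) throws Exception {
        // Example payload shape for an upload job (values are made up).
        String body = "{\"groupId\":\"12345\",\"collectionId\":\"ABCD1234\",\"giles\":\"<access-token>\"}";
        JsonNode node = new ObjectMapper().readTree(body);
        String groupId = node.get("groupId").asText();            // group the files are imported into
        String collectionId = node.get("collectionId").asText();  // new: target collection of the upload job
        String gilesToken = node.get("giles").asText();           // new: internal token for the job's user
        System.out.println(groupId + "/" + collectionId + ", token length " + gilesToken.length());
    }
}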
IUploadJob.java
@@ -30,6 +30,10 @@ public interface IUploadJob extends IJob {
void setCitationGroup(String citationGroup);

String getCitationGroup();

String getCitationCollection();

void setCitationCollection(String citationCollection);

ICitationGroup getCitationGroupDetail();

UploadJob.java
@@ -14,6 +14,7 @@ public class UploadJob extends Job implements IUploadJob {
private long fileSize;
private String contentType;
private String citationGroup;
private String citationCollection;
@Transient
private ICitationGroup citationGroupDetail;

@@ -57,6 +58,14 @@ public void setCitationGroup(String citationGroup) {
this.citationGroup = citationGroup;
}
@Override
public String getCitationCollection() {
return citationCollection;
}
@Override
public void setCitationCollection(String citationCollection) {
this.citationCollection = citationCollection;
}
@Override
public ICitationGroup getCitationGroupDetail() {
return citationGroupDetail;
}
ICitationCollectionManager.java
@@ -2,7 +2,10 @@

import java.util.List;

import org.springframework.social.zotero.exception.ZoteroConnectionException;

import edu.asu.diging.citesphere.core.exceptions.GroupDoesNotExistException;
import edu.asu.diging.citesphere.core.exceptions.ZoteroItemCreationFailedException;
import edu.asu.diging.citesphere.model.bib.ICitationCollection;
import edu.asu.diging.citesphere.model.bib.impl.CitationCollectionResult;
import edu.asu.diging.citesphere.user.IUser;
@@ -20,5 +23,8 @@ List<ICitationCollection> getAllCollections(IUser user, String groupId, String p
throws GroupDoesNotExistException;

void deleteLocalGroupCollections(String groupId);

ICitationCollection createCollection(IUser user, String groupId, String collectionName, String parentCollection)
throws GroupDoesNotExistException, ZoteroItemCreationFailedException, ZoteroConnectionException;

}
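A minimal sketch of how a caller might use the new createCollection method; the @Autowired field, the group id, and the collection name are assumptions, and the fragment would live inside a Spring-managed bean, not in this interface.

// Illustrative caller only; argument values and wiring are assumed, not taken from this PR.
@Autowired
private ICitationCollectionManager collectionManager;

public ICitationCollection createImportTarget(IUser user) throws GroupDoesNotExistException,
        ZoteroItemCreationFailedException, ZoteroConnectionException {
    // Passing null as parentCollection creates a top-level collection in the group.
    return collectionManager.createCollection(user, "12345", "Bulk upload", null);
}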
CitationCollectionManager.java
@@ -7,16 +7,19 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.social.zotero.exception.ZoteroConnectionException;
import org.springframework.stereotype.Service;

import edu.asu.diging.citesphere.core.exceptions.GroupDoesNotExistException;
import edu.asu.diging.citesphere.core.exceptions.ZoteroItemCreationFailedException;
import edu.asu.diging.citesphere.core.service.ICitationCollectionManager;
import edu.asu.diging.citesphere.core.zotero.IZoteroManager;
import edu.asu.diging.citesphere.data.bib.CitationCollectionRepository;
import edu.asu.diging.citesphere.data.bib.CitationGroupRepository;
import edu.asu.diging.citesphere.data.bib.ICollectionMongoDao;
import edu.asu.diging.citesphere.model.bib.ICitationCollection;
import edu.asu.diging.citesphere.model.bib.ICitationGroup;
import edu.asu.diging.citesphere.model.bib.impl.CitationCollection;
import edu.asu.diging.citesphere.model.bib.impl.CitationCollectionResult;
import edu.asu.diging.citesphere.user.IUser;

@@ -107,4 +110,22 @@ public ICitationCollection getCollection(IUser user, String groupId, String coll
public void deleteLocalGroupCollections(String groupId) {
collectionRepository.deleteByGroupId(groupId);
}

@Override
public ICitationCollection createCollection(IUser user, String groupId, String collectionName, String parentCollection)
throws GroupDoesNotExistException, ZoteroItemCreationFailedException, ZoteroConnectionException {
Optional<ICitationGroup> groupOptional = groupRepository.findFirstByGroupId(new Long(groupId));
if (!groupOptional.isPresent()) {
throw new GroupDoesNotExistException("Group with id " + groupId + " does not exist.");
}
if(parentCollection != null) {
Optional<ICitationCollection> collectionOptional = collectionRepository.findByKey(parentCollection);
if (!collectionOptional.isPresent()) {
throw new GroupDoesNotExistException("Collection with id " + parentCollection + " does not exist.");
}
}

ICitationCollection newCollection = zoteroManager.createCitationCollection(user, groupId, collectionName, parentCollection);
return collectionRepository.save((CitationCollection)newCollection);
}
}
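One reviewer-style note on createCollection above: new Long(groupId) is deprecated (since Java 9) and throws NumberFormatException for non-numeric input, which would surface as an unhandled runtime error rather than a GroupDoesNotExistException. A possible alternative for the first lookup, assuming group ids are expected to be numeric:

// Sketch only: same lookup without the deprecated Long constructor, with an explicit error for bad input.
long parsedGroupId;
try {
    parsedGroupId = Long.parseLong(groupId);
} catch (NumberFormatException e) {
    throw new GroupDoesNotExistException("Group with id " + groupId + " does not exist.");
}
Optional<ICitationGroup> groupOptional = groupRepository.findFirstByGroupId(parsedGroupId);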
IUploadCollectionJobManager.java
@@ -0,0 +1,16 @@
package edu.asu.diging.citesphere.core.service.jobs;

import java.util.List;

import org.springframework.web.multipart.MultipartFile;

import edu.asu.diging.citesphere.core.exceptions.GroupDoesNotExistException;
import edu.asu.diging.citesphere.core.model.jobs.IUploadJob;
import edu.asu.diging.citesphere.user.IUser;

public interface IUploadCollectionJobManager {

List<IUploadJob> createUploadJob(IUser user, MultipartFile[] files, List<byte[]> fileBytes, String groupId, String collectionId) throws GroupDoesNotExistException;


}
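The controller that drives this interface (ImportCollectionController, going by the commit messages) is not included in this diff excerpt, so the following is only a guess at the call shape: the mapping, parameter names, and user lookup are all assumed.

// Hypothetical controller method; only IUploadCollectionJobManager.createUploadJob comes from this PR.
@Autowired
private IUploadCollectionJobManager uploadCollectionJobManager;

@RequestMapping(value = "/auth/group/{groupId}/collection/{collectionId}/upload", method = RequestMethod.POST)
public ResponseEntity<String> uploadToCollection(Principal principal, @PathVariable String groupId,
        @PathVariable String collectionId, @RequestParam("files") MultipartFile[] files)
        throws GroupDoesNotExistException, IOException {
    List<byte[]> fileBytes = new ArrayList<>();
    for (MultipartFile file : files) {
        fileBytes.add(file.getBytes()); // read contents up front so the manager can persist them
    }
    IUser user = userManager.findByUsername(principal.getName());
    uploadCollectionJobManager.createUploadJob(user, files, fileBytes, groupId, collectionId);
    return new ResponseEntity<>(HttpStatus.OK);
}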
UploadCollectionJobManager.java
@@ -0,0 +1,140 @@
package edu.asu.diging.citesphere.core.service.jobs.impl;

import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;

import javax.transaction.Transactional;

import org.apache.tika.Tika;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import edu.asu.diging.citesphere.core.exceptions.FileStorageException;
import edu.asu.diging.citesphere.core.exceptions.GroupDoesNotExistException;
import edu.asu.diging.citesphere.core.exceptions.MessageCreationException;
import edu.asu.diging.citesphere.core.kafka.IKafkaRequestProducer;
import edu.asu.diging.citesphere.core.model.jobs.IUploadJob;
import edu.asu.diging.citesphere.core.model.jobs.JobStatus;
import edu.asu.diging.citesphere.core.model.jobs.impl.JobPhase;
import edu.asu.diging.citesphere.core.model.jobs.impl.UploadJob;
import edu.asu.diging.citesphere.core.repository.jobs.UploadJobRepository;
import edu.asu.diging.citesphere.core.service.IGroupManager;
import edu.asu.diging.citesphere.core.service.jobs.IUploadCollectionJobManager;
import edu.asu.diging.citesphere.core.service.jwt.IJwtTokenService;
import edu.asu.diging.citesphere.core.service.upload.IFileStorageManager;
import edu.asu.diging.citesphere.messages.KafkaTopics;
import edu.asu.diging.citesphere.messages.model.KafkaJobMessage;
import edu.asu.diging.citesphere.model.bib.ICitationGroup;
import edu.asu.diging.citesphere.user.IUser;

@Service
@Transactional
@PropertySource("classpath:/config.properties")
public class UploadCollectionJobManager implements IUploadCollectionJobManager {

private final Logger logger = LoggerFactory.getLogger(getClass());

@Value("${_job_page_size}")
private int jobPageSize;

@Autowired
private UploadJobRepository uploadJobRepository;

@Autowired
private IFileStorageManager fileManager;

@Autowired
private IKafkaRequestProducer kafkaProducer;

@Autowired
private IJwtTokenService tokenService;

@Autowired
private IGroupManager groupManager;

@Override
public List<IUploadJob> createUploadJob(IUser user, MultipartFile[] files, List<byte[]> fileBytes,
String groupId, String collectionId) throws GroupDoesNotExistException {
ICitationGroup group = groupManager.getGroup(user, groupId);
if (group == null) {
throw new GroupDoesNotExistException();
}

List<IUploadJob> jobs = new ArrayList<>();
int i = 0;
for (MultipartFile f : files) {
String filename = f.getOriginalFilename();

byte[] bytes = null;
UploadJob job = new UploadJob();
jobs.add(job);
job.setFilename(filename);
job.setCreatedOn(OffsetDateTime.now());
job.setUsername(user.getUsername());
job.setCitationGroup(groupId);
job.setCitationCollection(collectionId);
job.setPhases(new ArrayList<>());
try {
if (fileBytes != null && fileBytes.size() == files.length) {
bytes = fileBytes.get(i);
} else {
job.setStatus(JobStatus.FAILURE);
job.getPhases().add(new JobPhase(JobStatus.FAILURE,
"There is a mismatch between file metadata and file contents."));
continue;
}

if (bytes == null) {
job.setStatus(JobStatus.FAILURE);
job.getPhases().add(new JobPhase(JobStatus.FAILURE, "There is no file content."));
continue;
}
job = uploadJobRepository.save(job);
fileManager.saveFile(user.getUsername(), job.getId(), filename, bytes);

job.setStatus(JobStatus.PREPARED);
} catch (FileStorageException e) {
logger.error("Could not store file.", e);
job.setStatus(JobStatus.FAILURE);
job.getPhases().add(new JobPhase(JobStatus.FAILURE, e.getMessage()));
continue;
} finally {
i++;
uploadJobRepository.save(job);
}

String contentType = null;

if (bytes != null) {
Tika tika = new Tika();
contentType = tika.detect(bytes);
}

if (contentType == null) {
contentType = f.getContentType();
}

job.setContentType(contentType);
job.setFileSize(f.getSize());
uploadJobRepository.save(job);
String token = tokenService.generateJobApiToken(job);
try {
kafkaProducer.sendRequest(new KafkaJobMessage(token), KafkaTopics.COLLECTION_IMPORT_TOPIC);
} catch (MessageCreationException e) {
logger.error("Could not send Kafka message.", e);
job.setStatus(JobStatus.FAILURE);
job.getPhases().add(new JobPhase(JobStatus.FAILURE, e.getMessage()));
uploadJobRepository.save(job);
}
}

return jobs;
}

}
IZoteroConnector.java
@@ -85,4 +85,6 @@ void clearCollectionItemsCache(IUser user, String groupId, String collectionId,

Map<ItemDeletionResponse, List<String>> deleteMultipleItems(IUser user, String groupId, List<String> citationKeys, Long citationVersion) throws ZoteroConnectionException, ZoteroHttpStatusException;

Collection createCitationCollection(IUser user, String groupId, String collectionName,
String parentCollection) throws ZoteroItemCreationFailedException, ZoteroConnectionException;
}
IZoteroManager.java
@@ -81,4 +81,7 @@ CitationResults getCollectionItems(IUser user, String groupId, String collection
long getLatestGroupVersion(IUser user, String groupId);

Map<ItemDeletionResponse, List<String>> deleteMultipleItems(IUser user, String groupId, List<String> citationKeys, Long citationVersion) throws ZoteroConnectionException, ZoteroHttpStatusException;

ICitationCollection createCitationCollection(IUser user, String groupId, String collectionName, String parentCollection) throws ZoteroItemCreationFailedException, ZoteroConnectionException;

}
ZoteroConnector.java
@@ -369,4 +369,29 @@ public Map<ItemDeletionResponse, List<String>> deleteMultipleItems(IUser user, S
Zotero zotero = getApi(user);
return zotero.getGroupsOperations().deleteMultipleItems(groupId, citationKeys, citationVersion);
}

@Override
public Collection createCitationCollection(IUser user, String groupId, String collectionName,
String parentCollection) throws ZoteroItemCreationFailedException, ZoteroConnectionException {
Zotero zotero = getApi(user);

ItemCreationResponse response = zotero.getGroupCollectionsOperations().createCollection(groupId, collectionName, parentCollection);

// let's give Zotero a moment to process
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
logger.error("Could not sleep.", e);
// well if something goes wrong here, let's just ignore it
}

Map<String, String> success = response.getSuccess();
if (success.isEmpty()) {
logger.error("Could not create collection: " + response.getFailed().get("0"));
throw new ZoteroItemCreationFailedException(response);
}

// since we only submitted one item, there should only be one in the map
return getCitationCollection(user, groupId, success.values().iterator().next());
}
}
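A reviewer-style aside on the fixed one-second sleep above: if Zotero takes longer than that to make the new collection visible, the follow-up lookup can still miss it. A bounded retry is one alternative; the sketch assumes getCitationCollection returns null while the key is not yet visible, which this diff does not show.

// Illustrative only, not part of the PR: retry the lookup a few times instead of sleeping exactly once.
String newKey = success.values().iterator().next();
Collection created = null;
for (int attempt = 0; attempt < 5 && created == null; attempt++) {
    try {
        TimeUnit.SECONDS.sleep(1);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status instead of swallowing it
        break;
    }
    created = getCitationCollection(user, groupId, newKey); // assumed to return null until the collection is visible
}
return created;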
ZoteroManager.java
@@ -538,4 +538,11 @@ public Map<ItemDeletionResponse, List<String>> deleteMultipleItems(IUser user, S
throws ZoteroConnectionException, ZoteroHttpStatusException {
return zoteroConnector.deleteMultipleItems(user, groupId, citationKeys, citationVersion);
}

@Override
public ICitationCollection createCitationCollection(IUser user, String groupId, String collectionName,
String parentCollection) throws ZoteroItemCreationFailedException, ZoteroConnectionException {
Collection collection = zoteroConnector.createCitationCollection(user, groupId, collectionName, parentCollection);
return collectionFactory.createCitationCollection(collection);
}
}