/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* See LICENSE.txt included in this distribution for the specific
* language governing permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at LICENSE.txt.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Class representing file-based storage of per-source-file history.
*/
private boolean historyIndexDone = false;
public void setHistoryIndexDone() {
historyIndexDone = true;
}
public boolean isHistoryIndexDone() {
return historyIndexDone;
}
/**
* Generate history for single file.
* @param map_entry entry mapping filename to list of history entries
* @param env runtime environment
* @param repository repository object in which the file belongs
* @param test file object
* @param root root of the source repository
* @param renamed true if the file was renamed in the past
*/
/*
* Certain files require special handling - this is mainly for
* files which have been renamed in a Mercurial repository.
* This ensures that their complete (follow) history will be
* saved.
*/
if (renamed) {
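/*
 * Sketch of the elided body (an assumption, not the original code): fetch
 * the history of this single file from the repository so that the rename
 * is followed; the per-changeset file lists are then stripped, as noted
 * below. Repository.getHistory(File) is the assumed call and "test" is
 * the file object documented in the javadoc above.
 */
History hist = repository.getHistory(test);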
}
// File-based history cache does not store files for individual
// changesets, so strip them.
}
// add all history entries
} else {
}
}
// Assign tags to changesets they represent.
}
if (!file.isDirectory()) {
}
}
}
}
}
public void initialize() {
// nothing to do
}
public void optimize() {
// nothing to do
}
// all repositories are supported
return true;
}
/**
* Get a <code>File</code> object describing the cache file.
*
* @param file the file to find the cache for
* @return file that might contain cached history for <code>file</code>
*/
try {
}
} catch (IOException e) {
throw new HistoryException("Failed to get path relative to " +
"source root for " + file, e);
}
}
/**
* Read history from a file.
*/
try (XMLDecoder d = new XMLDecoder(
        new BufferedInputStream(
            new GZIPInputStream(new FileInputStream(file))))) {
    // The cache file is gzip-compressed XML (see the store code below),
    // so mirror that stream layering when decoding. "file" is the cache
    // file handed to this method (its signature is elided above).
    return (History) d.readObject();
}
}
/**
* Store history object (encoded as XML and compressed with gzip) in a file.
*
* @param history history object to store
* @param file file to store the history object into
* @param repo repository for the file
* @throws HistoryException
*/
throw new HistoryException(
}
// Incremental update of the history for this file.
try {
// Merge old history with the new history.
while (li.hasPrevious()) {
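// Sketch of the elided loop body (an assumption): li is presumed to
// iterate listNew (the freshly fetched entries) from the end, while
// listOld holds the previously cached entries; prepending each new
// entry keeps the merged list ordered newest to oldest.
listOld.add(0, li.previous());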
}
// Retag the last changesets in case there have been some new
// tags added to the repository. Technically we should just
// retag the last revision from listOld; however, this does not
// solve the problem when listNew contains new tags retroactively
// tagging changesets from listOld, so we resort to this somewhat
// crude solution.
}
}
}
} catch (IOException ex) {
// Ideally we would want to catch the case when an incremental update
// is done but the cached file is not there; however, we do not have
// the data to do that here.
}
// We have a problem that multiple threads may access the cache layer
// at the same time. Since I would like to avoid read-locking, I just
// serialize the write access to the cache file. The generation of the
// cache file would most likely be executed during index generation, and
// that happens sequentially anyway...
// Generate the file with a temporary name and move it into place when
// I'm done so I don't have to protect the readers from partially updated
// files...
try (XMLEncoder e = new XMLEncoder(
        new BufferedOutputStream(
            new GZIPOutputStream(out)))) {
    e.setPersistenceDelegate(File.class,
            new FilePersistenceDelegate());
    e.writeObject(history);
} catch (IOException ioe) {
    // Rethrow as HistoryException per this method's documented contract.
    throw new HistoryException("Failed to write history cache file", ioe);
}
synchronized (lock) {
"Failed to remove temporary history cache file");
}
throw new HistoryException(
"Cachefile exists, and I could not delete it.");
}
"Failed to remove temporary history cache file");
}
throw new HistoryException("Failed to rename cache tmpfile.");
}
}
}
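/*
 * Sketch of the "write to a temporary file, then move it into place"
 * pattern described above (an assumption about the elided code; shown
 * here with java.nio.file, while the original may well use
 * File.renameTo()). cacheFile is a hypothetical name for the final
 * cache file; lock is the monitor used in the synchronized block above.
 */
Path tmp = Files.createTempFile(cacheFile.toPath().getParent(), "history", ".tmp");
// ... write the gzip-compressed XML into tmp as shown above ...
synchronized (lock) {
    try {
        Files.move(tmp, cacheFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException ex) {
        throw new HistoryException("Failed to rename cache tmpfile.", ex);
    }
}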
"Done storing history for repo {0}",
}
/**
* Store history for the whole repository in a directory hierarchy resembling
* the original repository structure. History of individual files will be
* stored under this hierarchy, each file containing the history of the
* corresponding source file.
*
* @param history history object to process into per-file histories
* @param repository repository object
* @throws HistoryException
*/
throws HistoryException {
// Return immediately when there is nothing to do.
return;
}
"Storing history for repo {0}",
new HashMap<>();
/*
* Go through all history entries for this repository (parsed into
* HistoryEntry structures) and create a hash map which maps file
* names to lists of HistoryEntry structures corresponding to the
* changesets in which the file was modified.
*/
// The history entries are sorted from newest to oldest.
latestRev = e.getRevision();
}
/*
* We do not want to generate history cache for files which
* do not currently exist in the repository.
*/
continue;
}
}
/*
* We need to do a deep copy in order to have different tags
* for each commit.
*/
} else {
}
}
}
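/*
 * Sketch of the map building described above (an assumption, not the
 * elided original): each file touched by a changeset gets its own list
 * of HistoryEntry objects, newest first. HistoryEntry.getFiles() and a
 * HistoryEntry copy constructor (for the deep copy mentioned above) are
 * assumed here.
 */
Map<String, List<HistoryEntry>> map = new HashMap<>();
for (HistoryEntry e : history.getHistoryEntries()) {
    for (String s : e.getFiles()) {
        map.computeIfAbsent(s, k -> new ArrayList<>()).add(new HistoryEntry(e));
    }
}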
/*
* Now traverse the list of files from the hash map built above
* and for each file store its history (saved in the value of the
* hash map entry for the file) in a file. Skip renamed files,
* which will be handled separately below.
*/
try {
if (env.isHandleHistoryOfRenamedFiles() &&
continue;
}
} catch (IOException ex) {
"isRenamedFile() got exception " , ex);
}
}
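/*
 * Sketch of the traversal described above (an assumption about the
 * elided code): store the history of every non-renamed file; renamed
 * files are collected and handled in parallel below. The storeFile()
 * and isRenamedFile() signatures, as well as the History(List)
 * constructor, are hypothetical here.
 */
for (Map.Entry<String, List<HistoryEntry>> map_entry : map.entrySet()) {
    try {
        if (env.isHandleHistoryOfRenamedFiles()
                && isRenamedFile(map_entry, env, repository, history)) {
            continue;   // handled in the renamed-file pass below
        }
    } catch (IOException ex) {
        LOGGER.log(Level.WARNING, "isRenamedFile() got exception ", ex);
    }
    storeFile(new History(map_entry.getValue()),
            new File(root, map_entry.getKey()), repository);
}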
if (!env.isHandleHistoryOfRenamedFiles()) {
return;
}
/*
* Now handle renamed files (in parallel).
*/
new HashMap<>();
try {
}
} catch (IOException ex) {
"isRenamedFile() got exception ", ex);
}
}
// The directories for the renamed files have to be created before
// the actual files, otherwise storeFile() might race on mkdirs()
// when there are multiple renamed files from a single directory
// handled in parallel.
"Unable to create cache directory '{0}'.", dir);
}
}
public void run() {
try {
root, true);
// We want to catch any exception since we are in a thread.
"doFileHistory() got exception ", ex);
} finally {
}
}
});
}
// Wait for the executors to finish.
try {
} catch (InterruptedException ex) {
}
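/*
 * Sketch of the parallel handling of renamed files described above (an
 * assumption about the elided code): one task per renamed file is
 * submitted to an executor and a CountDownLatch is used to wait for all
 * of them, matching the run()/"wait for the executors" fragments above.
 * renamedMap, executor and the doFileHistory() signature are hypothetical.
 */
final CountDownLatch latch = new CountDownLatch(renamedMap.size());
for (final Map.Entry<String, List<HistoryEntry>> entry : renamedMap.entrySet()) {
    executor.submit(() -> {
        try {
            doFileHistory(entry, env, repository,
                    new File(root, entry.getKey()), root, true);
        } catch (Exception ex) {
            // We want to catch any exception since we are in a thread.
            LOGGER.log(Level.WARNING, "doFileHistory() got exception ", ex);
        } finally {
            latch.countDown();
        }
    });
}
try {
    latch.await();      // wait for all renamed-file tasks to finish
} catch (InterruptedException ex) {
    LOGGER.log(Level.SEVERE, "latch await() interrupted", ex);
}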
}
throws HistoryException {
try {
} catch (Exception e) {
"Error when reading cache file '" + cache, e);
}
}
/*
* Some mirrors of repositories which are capable of fetching history
* for directories may contain lots of files untracked by the given SCM.
* For these it would be a waste of time to get their history,
* since the history of all files in this repository should have been
* fetched in the first phase of indexing.
*/
return null;
}
long time;
try {
} catch (UnsupportedOperationException e) {
// In this case, we've found a file for which the SCM has no history.
// An example is a non-SCCS file somewhere in an SCCS-controlled
// workspace.
return null;
}
if (!file.isDirectory()) {
// Don't cache history information for directories, since the
// history information on the directory may change if a file in
// a sub-directory changes. This would cause us to present a stale
// history log until the current directory is updated and
// invalidates the cache entry.
// If retrieving the history takes too long, cache it!
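/*
 * Sketch of the elided store decision (an assumption): only cache when
 * the retrieval above took longer than a configured threshold; the
 * threshold accessor name below is hypothetical.
 */
if (time > env.getHistoryReaderTimeLimit()) {
    storeFile(history, file, repository);
}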
}
}
return history;
}
/**
* Check if the cache is up to date for the specified file.
* @param file the file to check
* @param cachedFile the file which contains the cached history for
* the file
* @return {@code true} if the cache is up to date, {@code false} otherwise
*/
}
/**
* Check if the directory is in the cache.
* @param directory the directory to check
* @return {@code true} if the directory is in the cache
*/
throws HistoryException {
assert directory.isDirectory();
return true;
}
try {
} catch (IOException e) {
throw new HistoryException("Could not resolve " +
}
}
try {
} catch (IOException ex) {
return null;
}
+ this.historyCacheDirName
}
}
/**
* Store latest indexed revision for the repository under data directory.
* @param repository repository
* @param rev latest revision which has been just indexed
*/
try {
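/*
 * Sketch of the elided write (an assumption): the latest indexed revision
 * is written into a per-repository file under the data directory.
 * getRepositoryCachedRevPath() is a hypothetical helper returning that
 * file's path; "rev" is the revision string documented above.
 */
try (Writer writer = new BufferedWriter(new FileWriter(
        getRepositoryCachedRevPath(repository)))) {
    writer.write(rev);
}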
} catch (IOException ex) {
LOGGER.log(Level.WARNING, "Cannot write latest cached revision to file for "
        + repository.getDirectoryName(), ex);
} finally {
try {
}
} catch (IOException ex) {
}
}
}
try {
try {
return null;
} finally {
try {
}
}
return null;
}
return rev;
}
// We don't have a good way to get this information from the file
// cache, so leave it to the caller to find a reasonable time to
// display (typically the last modified time on the file system).
return Collections.emptyMap();
}
// Remove the file that caches the last revision (done separately in case
// it ever gets moved outside of the hierarchy).
// Remove all files which constitute the history cache.
try {
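/*
 * Sketch of the elided removal (an assumption): delete the whole
 * per-repository history cache directory tree, children before parents.
 * The helper returning the cache directory path is hypothetical; any
 * IOException falls through to the catch below.
 */
Path cacheDir = Paths.get(getRepositoryHistDataDirname(repository));
try (Stream<Path> paths = Files.walk(cacheDir)) {
    paths.sorted(Comparator.reverseOrder())
            .map(Path::toFile)
            .forEach(File::delete);
}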
} catch (IOException ex) {
}
}
return getClass().getSimpleName();
}
}