author     Norbert Thiebaud <nthiebaud@gmail.com>    2011-08-18 19:27:33 -0500
committer  Norbert Thiebaud <nthiebaud@gmail.com>    2011-08-18 19:27:33 -0500
commit     5c9489cbee72de93027af0ad4ea5b6435ba54096 (patch)
tree       57bd7f99ff72fe9b6fcb725b2a84fa585e3fee82 /archives
parent     a1e4666f01bc5d20e285876e7e06a22f6f18a8da (diff)
move one-git conversion related tools to archives to clean up root
Diffstat (limited to 'archives')
-rw-r--r--  archives/clean_spaces/Makefile                                                            15
-rw-r--r--  archives/clean_spaces/clean_spaces.c                                                     558
-rw-r--r--  archives/lo_git_rewrite/Makefile                                                          18
-rwxr-xr-x  archives/lo_git_rewrite/lo_git_rewrite                                                   bin 0 -> 22169 bytes
-rw-r--r--  archives/lo_git_rewrite/lo_git_rewrite.c                                                 910
-rw-r--r--  archives/onegit/README                                                                   237
-rwxr-xr-x  archives/onegit/checkgit.sh                                                               54
-rwxr-xr-x  archives/onegit/onegit.sh                                                                322
-rw-r--r--  archives/onegit/patches/0001-adjust-.gitignore-for-new-combin.patch                      234
-rw-r--r--  archives/onegit/patches/0002-adjust-the-list-of-external-git-.patch                       25
-rw-r--r--  archives/onegit/patches/0003-remove-clone-calc-reference-in-lotuswordpro-qa.patch         27
-rw-r--r--  archives/onegit/patches/0004-remove-clone-calc-references-in-filters-test.cxx.patch       48
-rw-r--r--  archives/onegit/patches/0005-move-GIT_REPO_NAMES-init-up.patch                            32
-rw-r--r--  archives/onegit/patches/0006-binfilter-is-now-a-separate-repo.patch                       24
-rw-r--r--  archives/onegit/patches/0007-dictionaries-is-now-in-a-separate-optional-repo.patch        24
-rw-r--r--  archives/onegit/patches/0008-bootstrap-is-now-core-in-.-g.patch                           43
-rw-r--r--  archives/onegit/patches/0009-do-not-use-clone-in-paht-for-test-in-hwpfilter.patch         27
-rw-r--r--  archives/onegit/patches/0010-do-not-use-clone-in-path-for-test-in-sw.patch                46
-rw-r--r--  archives/onegit/patches/0011-another-round-of-hard-coded-clone-whack-a-mole-in-sv.patch   36
-rw-r--r--  archives/onegit/patches/0012-do-not-use-hard-code-clone-in-writerfilter-tests.patch       27
20 files changed, 2707 insertions, 0 deletions
diff --git a/archives/clean_spaces/Makefile b/archives/clean_spaces/Makefile
new file mode 100644
index 0000000..be49cd0
--- /dev/null
+++ b/archives/clean_spaces/Makefile
@@ -0,0 +1,15 @@
+
+OS=$(shell uname)
+
+ifeq ($(OS), Darwin)
+COPT=
+else
+COPT="-march=native"
+endif
+
+all: clean_spaces
+
+clean_spaces: clean_spaces.c
+ gcc -O2 $(COPT) -pthread clean_spaces.c -o clean_spaces
+
+
diff --git a/archives/clean_spaces/clean_spaces.c b/archives/clean_spaces/clean_spaces.c
new file mode 100644
index 0000000..6dfab09
--- /dev/null
+++ b/archives/clean_spaces/clean_spaces.c
@@ -0,0 +1,558 @@
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <assert.h>
+#include <pthread.h>
+#include <errno.h>
+
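+/* MAP_POPULATE is Linux-specific; on OS X, where it does not exist, define it
+ * to 0 so that it is a no-op in the mmap() flags below */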
+#ifdef __APPLE__
+#define MAP_POPULATE 0
+#endif
+
+struct item
+{
+ struct item* next;
+ char* filename;
+};
+
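+/* state shared between the stdin reader (main thread) and the worker threads:
+ * a mutex/condvar protected LIFO list of filenames to process, plus the output
+ * buffer used directly when running single-threaded */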
+struct context
+{
+ pthread_mutex_t mutex;
+ pthread_cond_t cond;
+ struct item* head;
+ struct item* items_pool;
+ int free_items;
+ int done;
+ int nb_workers;
+ char* output;
+ int allocated_output;
+};
+
+
+static char* filter[] =
+{
+ "c","cpp","cxx","h","hrc","hxx","idl","inl","java","map","pl","pm","sdi","sh","src","tab","xcu","xml"
+};
+
+static int _is_filter_candidat(char* extension)
+{
+ int first = 0;
+ int last = sizeof(filter)/sizeof(char*);
+ int next;
+ int cmp;
+ char* cursor;
+
+ while(last > first)
+ {
+ next = (first + last) >> 1;
+ cursor = filter[next];
+ cmp = strcmp(cursor, extension);
+ if(cmp > 0)
+ {
+ last = next;
+ }
+ else if(cmp < 0)
+ {
+ first = next + 1;
+ }
+ else
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+static struct item* _wait_for_item(struct context* context)
+{
+struct item* item;
+
+ pthread_mutex_lock(&context->mutex);
+ while(context->head == NULL)
+ {
+ if(context->done)
+ {
+ break;
+ }
+ pthread_cond_wait(&context->cond, &(context->mutex));
+ }
+ item = context->head;
+ if(item)
+ {
+ context->head = item->next;
+ if(item->next)
+ {
+ pthread_cond_broadcast(&context->cond);
+ }
+ }
+ pthread_mutex_unlock(&context->mutex);
+ return item;
+}
+
+static void _post_item(struct context* context, struct item* item)
+{
+ pthread_mutex_lock(&context->mutex);
+ item->next = context->head;
+ context->head = item;
+ if(!item->next)
+ {
+ pthread_cond_signal(&context->cond);
+ }
+ pthread_mutex_unlock(&context->mutex);
+}
+
+static int _do_one_file(char* filename, char** output, int* allocated_output)
+{
+int fd;
+int col;
+int rewrite;
+char* input;
+char* end;
+char* cursor_in;
+char* after_last_non_space;
+char* cursor_out = NULL;
+off_t size = 0;
+struct stat s;
+
+// fprintf(stderr,"process %s\n", filename);
+ /* look for the extension, ignore pure dot filename */
+ cursor_in = filename + strlen(filename);
+ while(cursor_in > filename && *cursor_in != '.')
+ {
+ cursor_in -= 1;
+ }
+ if(cursor_in == filename)
+ {
+ return 0;
+ }
+    /* check that the extension is a candidate for filtering */
+ if(!_is_filter_candidat(cursor_in + 1))
+ {
+ return 0;
+ }
+ if(stat(filename, &s))
+ {
+ fprintf(stderr, "*** Error on stat for %s\n", filename);
+ return 0;
+ }
+ /* we filter only non-zero sized regular file */
+ if(S_ISREG(s.st_mode))
+ {
+ size = s.st_size;
+ }
+ if(!size)
+ {
+ return 0;
+ }
+ fd = open(filename, O_RDONLY);
+ if(fd != -1)
+ {
+ input = mmap( NULL, size, PROT_READ, MAP_PRIVATE | MAP_POPULATE, fd, 0);
+ if(input != MAP_FAILED)
+ {
+ cursor_in = input;
+ end = input;
+ end += size;
+ after_last_non_space = cursor_in;
+ col = 0;
+ rewrite = 0;
+            /* first find the first occurrence, if any, of things needing a rewrite */
+ while(cursor_in < end)
+ {
+ /* short-cut the most common case */
+ if(*cursor_in > 32)
+ {
+ cursor_in += 1;
+ col += 1;
+ after_last_non_space = cursor_in;
+ }
+ else if(*cursor_in == '\n')
+ {
+ if(cursor_in != after_last_non_space)
+ {
+ rewrite = 1;
+ break;
+ }
+ else
+ {
+ cursor_in += 1;
+ after_last_non_space = cursor_in;
+ col = 0;
+ }
+ }
+ else if(*cursor_in == ' ')
+ {
+ cursor_in += 1;
+ col += 1;
+ }
+ else if(*cursor_in == '\t')
+ {
+ rewrite = 1;
+ break;
+ }
+ else
+ {
+ cursor_in += 1;
+ col += 1;
+ after_last_non_space = cursor_in;
+ }
+ }
+ close(fd);
+ if(rewrite)
+ {
+                /* since we need a rewrite, we need to copy the beginning of the file
+                 * all the way to the first anomaly and fix the current anomaly */
+                /* in theory the file could be all tabs... so the output could grow 4 times */
+ if((4 * size) >= *allocated_output)
+ {
+ int new_size = (((size+1) * 4) + 32768) & ~32767; /* round up to the next 32K */
+
+ *output = realloc(*output, new_size);
+// fprintf(stderr, "realloc from %d to %d\n", allocated_output, new_size);
+ *allocated_output = new_size;
+ }
+ if(*output)
+ {
+ int pre_len = 0;
+
+ cursor_out = *output;
+
+ if(*cursor_in == '\t')
+ {
+ pre_len = (int)(cursor_in - input);
+                        if(pre_len > 0)
+ {
+ memcpy(*output, input, pre_len);
+ cursor_out += pre_len;
+ }
+                        /* from now on after_last_non_space points into the output buffer;
+                         * technically it always has, but up to now the output buffer was
+                         * also the input buffer */
+ pre_len = (int)(after_last_non_space - input);
+ after_last_non_space = *output;
+ after_last_non_space += pre_len;
+
+                        /* expand the tab to the correct number of spaces */
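+                        /* (tab stops are every 4 columns, so pre_len + 1 == 4 - (col & 3)
+                         * spaces end up in the output) */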
+ pre_len = (~col & 3);
+ memset(cursor_out, ' ', 4);
+ cursor_out += pre_len + 1;
+ col += pre_len + 1;
+ cursor_in += 1;
+ }
+ else if(*cursor_in == '\n')
+ {
+ pre_len = (int)(after_last_non_space - input);
+ if(pre_len > 0)
+ {
+ memcpy(*output, input, pre_len);
+ cursor_out += (pre_len);
+ }
+ *cursor_out++ = '\n';
+ cursor_in += 1;
+ after_last_non_space = cursor_out;
+ col = 0;
+ }
+ else
+ {
+ /* that should not happen */
+ abort();
+ }
+                    /* clean up the rest of the file as needed */
+ while(cursor_in < end)
+ {
+ /* short-cut the most common case */
+ if(*cursor_in > 32)
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ after_last_non_space = cursor_out;
+ }
+ else if(*cursor_in == '\n')
+ {
+ if(cursor_out != after_last_non_space)
+ {
+ *after_last_non_space++ = *cursor_in++;
+ cursor_out = after_last_non_space;
+ }
+ else
+ {
+ *cursor_out++ = *cursor_in++;
+ after_last_non_space = cursor_out;
+ }
+ col = 0;
+ }
+ else if(*cursor_in == ' ')
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ }
+ else if(*cursor_in == '\t')
+ {
+ pre_len = (~col & 3);
+ memset(cursor_out, ' ', 4);
+ cursor_out += pre_len + 1;
+ col += pre_len + 1;
+ cursor_in += 1;
+ }
+ else
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ after_last_non_space = cursor_out;
+ }
+ }
+ if(after_last_non_space != cursor_out)
+ {
+ /* we have space on the last line without \n at the end */
+ *after_last_non_space++ = '\n';
+ cursor_out = after_last_non_space;
+ }
+ fd = open(filename, O_WRONLY | O_TRUNC);
+ if(fd != -1)
+ {
+ if(cursor_out == *output)
+ {
+ /* empty_file */
+ }
+ else
+ {
+ ssize_t written;
+
+ written = write(fd, *output, (size_t)(cursor_out - *output));
+ if(written != (ssize_t)(cursor_out - *output))
+ {
+ fprintf(stderr, "*** Error writing %s\n", filename);
+ }
+ }
+ close(fd);
+ }
+ else
+ {
+ fprintf(stderr, "*** Error re-opening %s for write\n", filename);
+ }
+ }
+ else
+ {
+ abort();
+ }
+ }
+ munmap(input, size);
+ }
+ else
+ {
+ close(fd);
+ }
+ }
+ else
+ {
+ fprintf(stderr, "*** Error on open for %s\n", filename);
+ }
+ return 0;
+}
+
+static void* _worker_proc(struct context* context)
+{
+char* output = NULL;
+int allocated_output = 1024*1024;
+struct item* item;
+
+ output = malloc(allocated_output);
+ while((item = _wait_for_item(context)) != NULL)
+ {
+ _do_one_file(item->filename, &output, &allocated_output);
+ }
+ return NULL;
+}
+
+static struct item* _get_item(struct context* context)
+{
+struct item* item;
+
+ if(context->free_items <= 0)
+ {
+        /* yes this leaks... but we don't care. it is not worth the effort
+         * to synchronize stuff to know when to recycle an item,
+         * i.e. when it is safe to free an items_pool block
+         */
+ context->items_pool = (struct item*)calloc(4096, sizeof(struct item));
+ context->free_items = 4095;
+ }
+ item = &(context->items_pool[context->free_items]);
+ context->free_items -= 1;
+ return item;
+}
+
+static char* _consume_input(struct context* context, char* fn_cursor, char* fn_tail)
+{
+char* filename;
+struct item* item;
+
+ while(fn_cursor <= fn_tail)
+ {
+ filename = fn_cursor;
+ while(*fn_cursor && *fn_cursor != '\n')
+ {
+ fn_cursor += 1;
+ }
+ if(*fn_cursor =='\n')
+ {
+ *fn_cursor = 0;
+ fn_cursor += 1;
+ if(context->nb_workers > 1)
+ {
+ item = _get_item(context);
+ item->filename = filename;
+// fprintf(stderr, "post %s\n", filename);
+ _post_item(context, item);
+ }
+ else
+ {
+ _do_one_file(filename, &context->output, &context->allocated_output);
+ }
+ }
+ else
+ {
+ fn_cursor = filename;
+ break;
+ }
+ }
+ return fn_cursor;
+}
+
+static void _usage(void)
+{
+ puts("Usage: clean_spaces [-p <nb_of_worker_thread>\n");
+ puts("stdin contain the list of file to process (one file per line)\n");
+}
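+
+/*
+ * Typical invocation (an illustration based on the usage text above and the
+ * FN_BUFFER_SIZE note in main): feed the tracked file list on stdin, e.g.
+ *
+ *     git ls-files | ./clean_spaces -p 4
+ */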
+
+int main(int argc, char** argv)
+{
+int rc = 0;
+int i;
+struct context context;
+pthread_t* workers_tid;
+pthread_mutexattr_t mutex_attribute;
+pthread_condattr_t cond_attribute;
+char* fn_buffer;
+/* Note: FN_BUFFER_SIZE has been sized to fit the largest output expected
+ * from git ls-files by a margin factor > 4, so we do not care about the
+ * case where stdin is bigger than that; actually we fail with rc=1 if that
+ * happens
+ */
+#define FN_BUFFER_SIZE (2*1024*1024)
+char* fn_cursor;
+char* fn_head;
+char* fn_tail;
+int fn_used = 0;
+int fn_read = 0;
+
+ memset(&context, 0, sizeof(struct context));
+ context.nb_workers = sysconf(_SC_NPROCESSORS_ONLN);
+ if(context.nb_workers < 1)
+ {
+ context.nb_workers = 1;
+ }
+
+ for( i = 1; !rc && i < argc; i++)
+ {
+ if(!strcmp(argv[i], "-h"))
+ {
+ _usage();
+ return 0;
+ }
+ else if(!strcmp(argv[i], "-p"))
+ {
+ i += 1;
+ if( i < argc)
+ {
+ context.nb_workers = atoi(argv[i]);
+ if(context.nb_workers < 0 || context.nb_workers > 512)
+ {
+ _usage();
+ return 8;
+ }
+ }
+ else
+ {
+ _usage();
+ return 8;
+ }
+ }
+ else
+ {
+ _usage();
+ return 8;
+ }
+ }
+
+ if(context.nb_workers > 1)
+ {
+ workers_tid = calloc(context.nb_workers, sizeof(pthread_t));
+
+ pthread_mutexattr_init(&mutex_attribute);
+ pthread_condattr_init(&cond_attribute);
+ pthread_mutex_init(&context.mutex, &mutex_attribute);
+ pthread_cond_init(&context.cond, &cond_attribute);
+
+ for(i = 0; i < context.nb_workers; i++)
+ {
+ pthread_create(&(workers_tid[i]), NULL, (void* (*)(void*))_worker_proc, &context);
+ }
+ }
+ else
+ {
+ context.allocated_output = 1024*1024;
+ context.output = malloc(context.allocated_output);
+ }
+
+ fn_buffer = malloc(FN_BUFFER_SIZE);
+ fn_tail = fn_cursor = fn_buffer;
+
+ for(;;)
+ {
+ fn_read = read(STDIN_FILENO, fn_buffer + fn_used, FN_BUFFER_SIZE - fn_used);
+ if(fn_read > 0)
+ {
+ fn_used += fn_read;
+ fn_tail += fn_read;
+ *fn_tail = 0;
+ fn_cursor = _consume_input(&context, fn_cursor, fn_tail);
+ if(fn_used == FN_BUFFER_SIZE)
+ {
+ rc = 1;
+ break;
+ }
+ }
+ else
+ {
+ if(fn_read == 0)
+ {
+ break;
+ }
+ else
+ {
+ if(errno != EINTR)
+ {
+ rc = -1;
+ break;
+ }
+ }
+ }
+ }
+ if(context.nb_workers > 1)
+ {
+ context.done = 1;
+ pthread_cond_broadcast(&context.cond);
+ for( i = 0; i < context.nb_workers; i++)
+ {
+ pthread_join(workers_tid[i], NULL);
+ }
+ }
+ return rc;
+}
diff --git a/archives/lo_git_rewrite/Makefile b/archives/lo_git_rewrite/Makefile
new file mode 100644
index 0000000..60cde48
--- /dev/null
+++ b/archives/lo_git_rewrite/Makefile
@@ -0,0 +1,18 @@
+
+OS=$(shell uname)
+
+ifeq ($(OS), Darwin)
+COPT=
+else
+COPT="-march=native"
+endif
+
+all: lo_git_rewrite
+
+clean:
+ rm -f lo_git_rewrite
+
+lo_git_rewrite: lo_git_rewrite.c
+	gcc -O2 $(COPT) lo_git_rewrite.c -o lo_git_rewrite
+
+
diff --git a/archives/lo_git_rewrite/lo_git_rewrite b/archives/lo_git_rewrite/lo_git_rewrite
new file mode 100755
index 0000000..a2c69ad
--- /dev/null
+++ b/archives/lo_git_rewrite/lo_git_rewrite
Binary files differ
diff --git a/archives/lo_git_rewrite/lo_git_rewrite.c b/archives/lo_git_rewrite/lo_git_rewrite.c
new file mode 100644
index 0000000..b73aba7
--- /dev/null
+++ b/archives/lo_git_rewrite/lo_git_rewrite.c
@@ -0,0 +1,910 @@
+
+#include <unistd.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <assert.h>
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+
+static struct buffer
+{
+ char* data;
+ char* head;
+ char* tail;
+ char* cursor;
+ char* workspace;
+ int allocated_workspace;
+ int free;
+ int allocated;
+ int nb_commit;
+ int nb_blob;
+ int nb_cleaned;
+ int nb_not_cleaned;
+ int nb_tag;
+ char* exclude_suffix;
+ int suffix_len;
+ char* exclude_module;
+ int module_len;
+ char* filter_module;
+ char* module;
+ int alternate_commit;
+ int exclude_download;
+
+} g_buffer;
+
+static char* g_prefix = "";
+
+#define kBUFFER_SIZE (30*1024*1024)
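+
+/*
+ * Intended use (see onegit.sh in this archive): this tool sits in a
+ * "git fast-export | lo_git_rewrite [options] | git fast-import" pipeline.
+ * It emits every blob twice -- the original and a cleaned copy (tabs expanded,
+ * trailing whitespace trimmed) -- and rewrites commit file actions to reference
+ * the cleaned copy for files with a candidate extension; it can also filter
+ * whole modules or paths out of the commits.
+ */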
+
+static void _realign_buffer(struct buffer* buffer)
+{
+int offset = 0;
+int used;
+char* d;
+char* s;
+
+ fprintf(stderr, "%s commit:%d tag:%d blob:%d cleaned:%d not_cleaned:%d\n", g_prefix,
+ buffer->nb_commit, buffer->nb_tag, buffer->nb_blob, buffer->nb_cleaned, buffer->nb_not_cleaned);
+// fprintf(stderr, "-> realligned buffer: datqa:%p head:%p, cursor:%p tail:%p free:%d nb_commit=%d\n",
+// buffer->data, buffer->head, buffer->cursor, buffer->tail, buffer->free, buffer->nb_commit);
+ if(buffer->head > buffer->data)
+ {
+ offset = buffer->cursor - buffer->head;
+ used = (buffer->tail - buffer->head);
+// fprintf(stderr, "realligned buffer: free=%d offset=%d used=%d\n", buffer->free, offset, used);
+// fprintf(stderr, "partial:|%.*s|\n", offset , buffer->head);
+ d = buffer->data;
+ s = buffer->head;
+ while(s <= buffer->tail)
+ {
+ *d++ = *s++;
+ }
+ *d = 0;
+ buffer->head = buffer->data;
+ buffer->cursor = buffer->head + offset;
+ buffer->tail = buffer->head + used;
+ buffer->free = buffer->allocated - used;
+ assert(buffer->free > 4096);
+ }
+// fprintf(stderr, "<- realligned buffer: datqa:%p head:%p, cursor:%p tail:%p free=%d\n",
+// buffer->data, buffer->head, buffer->cursor, buffer->tail, buffer->free);
+}
+
+static void _read_more(struct buffer* buffer)
+{
+int received;
+
+ if(buffer->free < 4096)
+ {
+ _realign_buffer(buffer);
+ }
+ Retry:
+ if(buffer->free == 0)
+ {
+ fprintf(stderr, "%s read_more error free=0: data:%p head:%p, cursor:%p tail:%p free:%d nb_commit=%d\n", g_prefix,
+ buffer->data, buffer->head, buffer->cursor, buffer->tail, buffer->free, buffer->nb_commit);
+ abort();
+ }
+ assert(buffer->free > 0);
+ received = read(STDIN_FILENO, buffer->tail, buffer->free);
+ if(received > 0)
+ {
+ buffer->tail += received;
+ buffer->free -= received;
+ }
+ else
+ {
+ if(received == 0)
+ {
+ fprintf(stderr, "_read_more premature EOF\n");
+ exit(100);
+ }
+ else
+ {
+ if(errno != EINTR)
+ {
+ fprintf(stderr, "_read_more error:%d\n", errno);
+ exit(1);
+ }
+ else
+ {
+ goto Retry;
+ }
+ }
+ }
+}
+
+static void _read_in_full(struct buffer* buffer, int len)
+{
+ while(buffer->cursor + len >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+}
+
+static void _write_in_full(char* data, int len)
+{
+ int written;
+ int done = 0;
+
+// write(STDERR_FILENO, data + done, len - done);
+ if(len == 0)
+ {
+ return;
+ }
+ assert(len > 0);
+ do
+ {
+ Retry:
+ written = write(STDOUT_FILENO, data + done, len - done);
+ if(written > 0)
+ {
+ done += written;
+ }
+ else
+ {
+ if(written == -1 && errno == EINTR)
+ {
+ goto Retry;
+ }
+ fprintf(stderr, "_write_in_full error:%d\n", errno);
+ exit(1);
+ }
+ }
+ while(done < len);
+}
+
+static void _copy_line(struct buffer* buffer)
+{
+int rc = 0;
+int pos = 0;
+int space_count = 0;
+int id_pos = 0;
+int id_end_pos = -1;
+
+ while(buffer->cursor[pos] != '\n')
+ {
+ if(buffer->cursor[pos] == ' ')
+ {
+ space_count += 1;
+ if(space_count == 2)
+ {
+ id_end_pos = pos;
+ }
+ }
+ else if(buffer->cursor[pos] == ':')
+ {
+ if(space_count == 1)
+ {
+ id_pos = pos + 1;
+ }
+ }
+ pos += 1;
+ if(buffer->cursor + pos >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+// fprintf(stderr, "copy_line(id_pos:%d):|%.*s|\n", id_pos, pos+1, buffer->cursor);
+ if(id_pos)
+ {
+ if(id_end_pos == -1)
+ {
+ _write_in_full(buffer->cursor, pos);
+ _write_in_full("0\n", 2);
+ }
+ else
+ {
+ _write_in_full(buffer->cursor, id_end_pos);
+ _write_in_full("0", 1);
+ _write_in_full(buffer->cursor + id_end_pos, (pos + 1) - id_end_pos);
+ }
+ }
+ else
+ {
+ _write_in_full(buffer->cursor, pos + 1);
+ }
+ buffer->cursor += pos + 1;
+}
+
+static void _copy_data(struct buffer* buffer)
+{
+int pos = 0;
+int data_len;
+char* cursor;
+
+ while(buffer->cursor[pos] != '\n')
+ {
+ pos += 1;
+ if(buffer->cursor + pos >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ cursor = buffer->cursor + 5;
+ data_len = 0;
+ while(*cursor != '\n')
+ {
+ data_len *= 10;
+ data_len += *cursor - '0';
+ cursor += 1;
+ }
+// fprintf(stderr, "data %d\n", data_len);
+ _write_in_full(buffer->cursor, pos + 1);
+ buffer->cursor += pos + 1;
+ if(buffer->cursor + (data_len) >= buffer->tail)
+ {
+ _read_in_full(buffer, data_len);
+ }
+ _write_in_full(buffer->cursor, data_len);
+ buffer->cursor += data_len;
+}
+
+static void _write_int(int len)
+{
+ char temp[13];
+ char* cursor;
+
+ cursor = temp + 12;
+ *cursor-- = 0;
+ *cursor = '\n';
+ do
+ {
+ cursor -= 1;
+ *cursor = (len % 10) + '0';
+ len /= 10;
+ }
+ while(len);
+ _write_in_full(cursor, strlen(cursor));
+}
+
+int _convert_data(struct buffer* buffer, char* input, int len)
+{
+int col;
+int pre_len;
+char* end;
+char* cursor_in;
+char* after_last_non_space;
+char* cursor_out = buffer->workspace;
+
+ cursor_in = input;
+ end = input;
+ end += len;
+ after_last_non_space = cursor_out;
+ col = 0;
+ while(cursor_in < end)
+ {
+ /* short-cut the most common case */
+ if(*cursor_in > 32)
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ after_last_non_space = cursor_out;
+ }
+ else if(*cursor_in == '\n')
+ {
+ if(cursor_out != after_last_non_space)
+ {
+ *after_last_non_space++ = *cursor_in++;
+ cursor_out = after_last_non_space;
+ }
+ else
+ {
+ *cursor_out++ = *cursor_in++;
+ after_last_non_space = cursor_out;
+ }
+ col = 0;
+ }
+ else if(*cursor_in == ' ')
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ }
+ else if(*cursor_in == '\t')
+ {
+ pre_len = (~col & 3);
+ memset(cursor_out, ' ', 4);
+ cursor_out += pre_len + 1;
+ col += pre_len + 1;
+ cursor_in += 1;
+ }
+ else
+ {
+ *cursor_out++ = *cursor_in++;
+ col += 1;
+ after_last_non_space = cursor_out;
+ }
+ }
+ return cursor_out - buffer->workspace;
+}
+
+static void _process_blob(struct buffer* buffer)
+{
+int h1;
+int h2;
+int data_len;
+int new_len;
+
+ buffer->cursor = buffer->head;
+ if(buffer->head + 12 > buffer->tail)
+ {
+ _read_in_full(buffer, 12);
+ }
+ if(memcmp(buffer->head, "blob\nmark :", 11))
+ {
+ fprintf(stderr, "unknown command %11.11s\n", buffer->head);
+ exit(1);
+ }
+ buffer->cursor += 11;
+ while(*buffer->cursor != '\n')
+ {
+ buffer->cursor += 1;
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ h1 = buffer->cursor - buffer->head;
+ if(buffer->cursor + 7 >= buffer->tail)
+ {
+ _read_in_full(buffer, 7);
+ }
+ if(memcmp(buffer->cursor, "\ndata ", 6))
+ {
+ fprintf(stderr, "unknown command %6.6s\n", buffer->cursor);
+ exit(1);
+ }
+ buffer->cursor += 6;
+ data_len = 0;
+ while(*buffer->cursor != '\n')
+ {
+ data_len *= 10;
+ data_len += *buffer->cursor - '0';
+ buffer->cursor += 1;
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ h2 = buffer->cursor - buffer->head;
+ if(buffer->cursor + (data_len + 1) >= buffer->tail)
+ {
+ _read_in_full(buffer, data_len + 1);
+ }
+ _write_in_full(buffer->head, h1);
+ _write_in_full("0", 1);
+ _write_in_full(buffer->head + h1, data_len + (h2 - h1) + 2);
+ new_len = _convert_data(buffer, buffer->head + h2 + 1, data_len);
+ _write_in_full(buffer->head, h1);
+ _write_in_full("1", 1);
+ _write_in_full(buffer->head + h1, 6);
+ _write_int(new_len);
+ _write_in_full(buffer->workspace, new_len);
+ _write_in_full("\n",1);
+ buffer->head += data_len + h2 + 2;
+ buffer->cursor = buffer->head;
+ buffer->nb_blob += 1;
+}
+
+static char* filter[] =
+{
+ "c","cpp","cxx","h","hrc","hxx","idl","inl","java","map","pl","pm","sdi","sh","src","tab","xcu","xml"
+};
+
+static int _is_filter_candidat(char* fn, int len)
+{
+int first = 0;
+int last = sizeof(filter)/sizeof(char*);
+int next;
+int cmp;
+char* cursor;
+char* extension;
+char temp[10];
+
+ len -= 1;
+ cursor = fn + len;
+ extension = temp + 9;
+ *extension-- = 0;
+
+ while(extension > temp && len > 0)
+ {
+ if(*cursor == '.')
+ {
+ break;
+ }
+ *extension-- = *cursor--;
+ len -= 1;
+ }
+ if(*cursor != '.')
+ {
+ return 0;
+ }
+ extension += 1;
+
+ while(last > first)
+ {
+ next = (first + last) >> 1;
+ cursor = filter[next];
+ cmp = strcmp(cursor, extension);
+ if(cmp > 0)
+ {
+ last = next;
+ }
+ else if(cmp < 0)
+ {
+ first = next + 1;
+ }
+ else
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+static int _is_kept(struct buffer* buffer, char* name, int len)
+{
+int i;
+int keep = 1;
+int match;
+
+ if(buffer->module)
+ {
+ match = 1;
+ for(i = 0; i < len && i < buffer->module_len; i++)
+ {
+ if(name[i] != buffer->module[i])
+ {
+ match = 0;
+ break;
+ }
+ }
+ if(match && (i >= len || name[i] != '/'))
+ {
+ match = 0;
+ }
+ if(buffer->exclude_module)
+ {
+ keep = !match;
+ }
+ else
+ {
+ keep = match;
+ }
+ }
+ if(keep && buffer->exclude_download)
+ {
+ for(i = 0; i < len; i++)
+ {
+ if(name[i] == '/')
+ {
+ break;
+ }
+ }
+ if(len - i > 10)
+ {
+ if(!memcmp(name + i, "/download/" , 10))
+ {
+// fprintf(stderr, "%s exclude-download: %.*s\n", g_prefix, len, name);
+ keep = 0;
+ }
+ }
+ }
+ if(keep && buffer->exclude_suffix)
+ {
+ if(len > buffer->suffix_len)
+ {
+ if(!memcmp(name + (len - buffer->suffix_len), buffer->exclude_suffix, buffer->suffix_len))
+ {
+// fprintf(stderr, "%s exclude-suffix: %.*s\n", g_prefix, len, name);
+ keep = 0;
+ }
+ }
+ }
+
+ return keep;
+}
+
+static void _filter_commit_action(struct buffer* buffer)
+{
+ int i = 0;
+ int id_pos = -1;
+ int mode_pos = -1;
+ int path_pos = -1;
+
+ while(buffer->cursor[i] != '\n')
+ {
+ if(buffer->cursor[i] == ' ')
+ {
+ if(mode_pos < 0 )
+ {
+ mode_pos = i + 1;
+ }
+ else if(id_pos < 0)
+ {
+ id_pos = i + 1;
+ }
+ else if(path_pos < 0)
+ {
+ path_pos = i + 1;
+ }
+ }
+ i += 1;
+ if(buffer->cursor + i >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+// fprintf(stderr, "commit_action (id_pos=%d path_pos=%d):|%.*s|\n",id_pos, path_pos, i + 1, buffer->cursor);
+ if(_is_kept(buffer, buffer->cursor + path_pos, i - path_pos))
+ {
+ _write_in_full(buffer->cursor, path_pos - 1);
+ if(_is_filter_candidat(buffer->cursor + path_pos, i - path_pos))
+ {
+ _write_in_full("1", 1);
+ buffer->nb_cleaned += 1;
+ }
+ else
+ {
+ _write_in_full("0", 1);
+ buffer->nb_not_cleaned += 1;
+ }
+ _write_in_full(buffer->cursor + path_pos - 1, i - path_pos + 2);
+ }
+ buffer->cursor += i + 1;
+}
+
+static void _filter_line(struct buffer* buffer)
+{
+ int i = 0;
+ int path_pos = -1;
+
+ while(buffer->cursor[i] != '\n')
+ {
+ i += 1;
+ if(buffer->cursor + i >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+
+ if(_is_kept(buffer, buffer->cursor + 2, i - 2))
+ {
+ _write_in_full(buffer->cursor, i + 1);
+ }
+ buffer->cursor += i + 1;
+}
+
+static void _process_commit_action(struct buffer* buffer)
+{
+ int i = 0;
+ int id_pos = -1;
+ int mode_pos = -1;
+ int path_pos = -1;
+
+ while(buffer->cursor[i] != '\n')
+ {
+ if(buffer->cursor[i] == ' ')
+ {
+ if(mode_pos < 0 )
+ {
+ mode_pos = i + 1;
+ }
+ else if(id_pos < 0)
+ {
+ id_pos = i + 1;
+ }
+ else if(path_pos < 0)
+ {
+ path_pos = i + 1;
+ }
+ }
+ i += 1;
+ if(buffer->cursor + i >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+// fprintf(stderr, "commit_action (id_pos=%d path_pos=%d):|%.*s|\n",id_pos, path_pos, i + 1, buffer->cursor);
+ _write_in_full(buffer->cursor, path_pos - 1);
+ if(_is_filter_candidat(buffer->cursor + path_pos, i - path_pos))
+ {
+ _write_in_full("1", 1);
+ buffer->nb_cleaned += 1;
+ }
+ else
+ {
+ _write_in_full("0", 1);
+ buffer->nb_not_cleaned += 1;
+ }
+ _write_in_full(buffer->cursor + path_pos - 1, i - path_pos + 2);
+ buffer->cursor += i + 1;
+}
+
+static int _process_commit(struct buffer* buffer)
+{
+int rc = 0;
+
+// fprintf(stderr, "-->process_commit\n");
+ buffer->cursor = buffer->head;
+ while(*buffer->cursor != 'd')
+ {
+ _copy_line(buffer);
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ _copy_data(buffer);
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ if(!rc)
+ {
+ while(*buffer->cursor != '\n')
+ {
+ switch(*buffer->cursor)
+ {
+ case 'f':
+ case 'm':
+ case 'D':
+ _copy_line(buffer);
+ break;
+ case 'M':
+ _process_commit_action(buffer);
+ break;
+ default:
+ fprintf(stderr, "unrecognized commit action '%.120s'\n", buffer->cursor - 20);
+ exit(1);
+ }
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ }
+ buffer->nb_commit += 1;
+ buffer->head = buffer->cursor;
+// fprintf(stderr, "<--process_commit\n");
+ return rc;
+}
+
+static int _process_filtering_commit(struct buffer* buffer)
+{
+int rc = 0;
+
+// fprintf(stderr, "-->process_commit\n");
+ buffer->cursor = buffer->head;
+ while(*buffer->cursor != 'd')
+ {
+ _copy_line(buffer);
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ _copy_data(buffer);
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ if(!rc)
+ {
+ while(*buffer->cursor != '\n')
+ {
+ switch(*buffer->cursor)
+ {
+ case 'f':
+ case 'm':
+ _copy_line(buffer);
+ break;
+ case 'D':
+ _filter_line(buffer);
+ break;
+ case 'M':
+ _filter_commit_action(buffer);
+ break;
+ default:
+ fprintf(stderr, "unrecognized commit action '%.120s'\n", buffer->cursor - 20);
+ exit(1);
+ }
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ }
+ buffer->nb_commit += 1;
+ buffer->head = buffer->cursor;
+// fprintf(stderr, "<--process_commit\n");
+ return rc;
+}
+
+static void _process_tag(struct buffer* buffer)
+{
+ buffer->cursor = buffer->head;
+ while(*buffer->cursor != 'd')
+ {
+ _copy_line(buffer);
+ if(buffer->cursor >= buffer->tail)
+ {
+ _read_more(buffer);
+ }
+ }
+ _copy_data(buffer);
+ buffer->nb_tag += 1;
+ buffer->head = buffer->cursor;
+}
+
+static int _consume_input(struct buffer* buffer)
+{
+int rc = 0;
+
+// fprintf(stderr, "-->consume_input\n");
+ do
+ {
+ switch(*(buffer->head))
+ {
+ case 'b':
+            _process_blob(buffer);
+ break;
+ case 'c':
+ if(buffer->alternate_commit)
+ {
+ rc = _process_filtering_commit(buffer);
+ }
+ else
+ {
+ rc = _process_commit(buffer);
+ }
+ break;
+ case 't':
+ _process_tag(buffer);
+ break;
+ default:
+ _copy_line(buffer);
+ buffer->head = buffer->cursor;
+ break;
+ }
+
+ }
+ while(buffer->head < buffer->tail);
+// fprintf(stderr, "<--consume_input\n");
+ return rc;
+}
+
+
+int main(int argc, char** argv)
+{
+int rc = 0;
+int i;
+int received = 0;
+struct buffer* buffer = &g_buffer;
+
+ buffer->allocated = kBUFFER_SIZE;
+ for(i = 1; i < argc; i++)
+ {
+ if(!strcmp(argv[i], "--prefix"))
+ {
+ i += 1;
+ if(i < argc)
+ {
+ g_prefix = argv[i];
+ }
+ }
+ else if(!strcmp(argv[i], "--exclude-module"))
+ {
+ i += 1;
+ if(i < argc)
+ {
+ buffer->module = buffer->exclude_module = argv[i];
+ buffer->module_len = strlen(buffer->module);
+ }
+ }
+ else if(!strcmp(argv[i], "--exclude-download"))
+ {
+ buffer->exclude_download = 1;
+ }
+ else if(!strcmp(argv[i], "--filter-module"))
+ {
+ i += 1;
+ if(i < argc)
+ {
+ buffer->module = buffer->filter_module = argv[i];
+ buffer->module_len = strlen(buffer->module);
+ }
+ }
+ else if(!strcmp(argv[i], "--exclude-suffix"))
+ {
+ i += 1;
+ if(i < argc)
+ {
+ buffer->exclude_suffix = argv[i];
+ buffer->suffix_len = strlen(buffer->exclude_suffix);
+ }
+ }
+ else if(!strcmp(argv[i], "--buffer-size"))
+ {
+ i += 1;
+ if(i < argc)
+ {
+ buffer->allocated = atoi(argv[i]);
+ if(buffer->allocated < 10 || buffer->allocated > 1024)
+ {
+ fprintf(stderr, "--buffer-size must be in MB between 10 and 1024\n");
+ exit(1);
+ }
+ buffer->allocated *= 1024*1024;
+ }
+ }
+ else
+ {
+ fprintf(stderr, "ignored unknown arg: |%s|\n", argv[i]);
+ }
+ }
+ if(buffer->exclude_module || buffer->filter_module || buffer->exclude_suffix || buffer->exclude_download)
+ {
+ buffer->alternate_commit = 1;
+ }
+
+ buffer->data = malloc(buffer->allocated + 1);
+ if(!buffer->data)
+ {
+ return ENOMEM;
+ }
+ buffer->workspace = malloc((buffer->allocated * 3) / 2 + 1);
+ if(!buffer->workspace)
+ {
+ return ENOMEM;
+ }
+ buffer->head = buffer->data;
+ buffer->tail = buffer->data;
+ buffer->cursor = buffer->data;
+ buffer->free = buffer->allocated;
+
+
+ while(!rc)
+ {
+ if(buffer->free < 4096)
+ {
+ _realign_buffer(buffer);
+ }
+ if(buffer->free > 0)
+ {
+ Retry:
+ received = read(STDIN_FILENO, buffer->tail, buffer->free);
+ if(received > 0)
+ {
+ buffer->tail += received;
+ buffer->free -= received;
+ }
+ else
+ {
+ if(received == 0)
+ {
+ break;
+ }
+ else
+ {
+ if(errno == EINTR)
+ {
+ goto Retry;
+ }
+ else
+ {
+ fprintf(stderr, "read errno:%d\n", errno);
+ return errno;
+ }
+ }
+ }
+ }
+ rc = _consume_input(buffer);
+ }
+ fprintf(stderr, "%s final commit:%d tag:%d blob:%d cleaned:%d not_cleaned:%d\n", g_prefix,
+ buffer->nb_commit, buffer->nb_tag, buffer->nb_blob, buffer->nb_cleaned, buffer->nb_not_cleaned);
+
+ return rc;
+}
diff --git a/archives/onegit/README b/archives/onegit/README
new file mode 100644
index 0000000..5bf273a
--- /dev/null
+++ b/archives/onegit/README
@@ -0,0 +1,237 @@
+
+Single-git-repo Plan as of 2011-05-17 by Norbert Thiebaud
+
+
+Currently we have 20 git repositories:
+bootstrap + artwork base calc components extensions extras filters help impress
+libs-core libs-extern libs-extern-sys libs-gui postprocess sdk testing
+translations ure writer
+
+For reference, the size of the .git of each of these repositories is:
+
+47M bootstrap
+57M artwork
+27M base
+83M calc
+44M components
+50M extensions
+42M extras
+99M filters
+29M help
+32M impress
+184M libs-core
+317M libs-extern-sys
+70M libs-extern
+101M libs-gui
+1.3M postprocess
+11M sdk
+81M testing
+240M translations
+59M ure
+92M writer
+
+I concentrate on the size of the .git because that is the size that really
+matters for the performance of most git operations, and especially for git clone
+out of a remote location.
+
+The general idea is to consolidate some of these repositories together using a
+simple git fetch.
+This technique has the merit of relative simplicity, but the drawback is that
+the resulting history, although theoretically complete, is fairly unusable.
+That is, it would be very cumbersome to check out at a point in
+time prior to when the actual fusion is done and have a complete
+buildable tree.
+The core issue is that the 'true' history is in fact represented by 20 parallel
+histories.
+In order to mitigate that, the tags of each imported repository will be renamed,
+prefixing the name of the repository in front of the tag name; that way we
+avoid name conflicts, and the different tags for the same 'history' level
+in each of the repositories remain accessible.
+Nevertheless, you can only check out one of these tags, i.e. essentially only
+one old repository at a time.
+It is still possible to check out everything properly, but that would require
+a somewhat elaborate setup... It is simpler to keep a version
+of our current split git around for these purposes.
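+
+For illustration, the per-repository steps boil down to roughly the following
+(this mirrors the process_generic/merge_generic functions of onegit.sh described
+below; 'calc' and the gittemp path are just placeholders):
+
+    # in the rewritten temporary copy of a repo: prefix its tags
+    for oldtag in $(git tag) ; do
+        git tag "calc_${oldtag}" "$oldtag" && git tag -d "$oldtag"
+    done
+
+    # in the combined repo: fetch that copy and merge its master
+    git remote add calc /path/to/gittemp/calc
+    git fetch calc
+    git merge -Xours calc/master
+    git remote rm calc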
+
+== Per repository analysis ==
+
+We will review each repository and discuss what the migration will
+entail for it.
+
+=== bootstrap ===
+
+bootstrap will be used as the anchor for the whole process. It is left as is
+and becomes, after all processing, our new 'core' repository.
+
+=== base calc components extensions extras impress libs-core libs-gui postprocess sdk writer ===
+
+These are bread-and-butter code repositories. I did not see any discussion
+suggesting they should not be combined.
+The only processing step is to rename the tags to avoid name conflicts.
+
+The current version of the onegit.sh script combines all these repositories into
+the core repo.
+
+=== artwork ===
+
+Artwork contains mostly binary objects. Its content is mostly the domain of
+the graphic design team, and from the dev side changes consist mostly of
+renaming things and moving things around.
+
+The rate of change is fairly low (46 commits since the beginning of the year).
+The size of the repo is moderate (with respect to the other repositories).
+
+For simplicity's sake, it would not be too costly to consolidate artwork into core.
+But in the long term it may be useful, from a size perspective and from a
+division-of-labor perspective, to keep artwork as a stand-alone repo.
+In the latter case we will keep it initially as a sub-git in clone/artwork,
+just as today.
+
+
+The current version of the onegit.sh script combines artwork into core,
+but a decision whether to do that or to keep it separate must be taken
+before the migration.
+
+=== help ===
+
+This repo contains scripts and metadata to generate the help files.
+Technically this is an optional repository, but it is a fairly small one
+and it directly depends on tools that are in libs-gui and other modules
+in the core repository.
+
+I'm unsure about how that repo's content is actually managed.
+
+The current version of the onegit.sh script combines help into
+the core repo.
+It could probably be kept as a stand-alone repo without too much negative
+impact. Input on that is welcome...
+
+=== filters ===
+
+filters' particularity is that it hosts binfilter.
+Since binfilter is deprecated and will eventually be dropped, it sounds
+like a good idea to take advantage of this intrusive reorganisation
+of the git repositories to extract binfilter out of filters.
+This saves about 60% of the size of filters.
+
+So binfilter will be extracted out of filters; the resulting 'lean' filters
+will have its tags renamed and be combined with core.
+The new binfilter git repository will be placed in clone/binfilter and
+managed the same way auxiliary git repositories are managed today,
+with the added benefit of being completely optional.
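+
+For illustration, the split amounts to two history-rewrite passes over the same
+source repo, as done by process_generic in onegit.sh (the source path is only a
+placeholder, and each pipeline runs from inside a freshly git-init-ed target repo):
+
+    # filters without binfilter, to be merged into core
+    (cd /path/to/clone/filters && git fast-export --signed-tag=strip --branches --tags) \
+        | lo_git_rewrite --prefix "filters:" --exclude-module binfilter | git fast-import
+
+    # binfilter alone, becoming its own repo under clone/binfilter
+    (cd /path/to/clone/filters && git fast-export --signed-tag=strip --branches --tags) \
+        | lo_git_rewrite --prefix "binfilter:" --filter-module binfilter | git fast-import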
+
+=== libs-extern ===
+
+libs-extern is essentially a wrapper to patch and build external libraries.
+It is a fairly big repository, but in fact a substantial part of the size
+is due to the fact that once upon a time the tar.gz of these external libraries
+were dumped into the git repo. We do not do that anymore, but the history
+still reflects that.
+Once again, since we are going through an intrusive reorg we may as well take
+advantage of this to do some clean-up and put that repo on a diet.
+
+So libs-extern will undergo a git-filter to remove historical */download/*
+instances and to rename the tags. It will then be combined with core.
+
+=== libs-extern-sys ===
+
+The case of libs-extern-sys is very similar to that of libs-extern,
+with the added twist that a big part of this repo is used by the 'dictionaries'
+module. 'dictionaries' is more related to 'translations' than to the core code;
+it is usually maintained by the localisation/translation group.
+
+libs-extern-sys as it stands is a pretty big repo, in fact our biggest,
+but without dictionaries and with a clean-up of the old historical
+tar.gz its size dwindles to 2MB.
+
+So, dictionaries will be extracted from libs-extern-sys and then added to
+translations.
+
+The remaining libs-extern-sys will undergo a git-filter to remove historical
+*/download/* files, the tags will be renamed, and it will then be combined with core.
+
+=== testing ===
+
+The testing repository is a fairly sizeable repo. It has been suggested that it be kept
+apart.
+It is sadly a fairly low-activity git at this point, but we should certainly
+hope that it won't remain so.
+I find it hard to justify keeping it apart, as tests should live and evolve
+fairly symbiotically with the code they test.
+Keeping these two in sync would mean mostly maintaining the ./g mechanism
+we have currently, with no real hope that I can see to avoid it. That kind
+of defeats the purpose of this exercise.
+
+The current version of the onegit.sh script combines testing into core,
+but a decision whether to do that or to keep it separate must be taken
+before the migration.
+
+=== translations ===
+
+The translations repository is a big repository, managed quasi-exclusively by
+the translation/localisation team. It is an optional repository in the
+sense that it is not required to build the product.
+
+There seems to be a consensus that 'translations' should not be combined
+into core, and should be left alone. In fact most of the discussion revolves
+around improving the independence of 'translations' to possibly make the
+localisation process a completely independent process.
+
+So translations is left untouched and will still be optionally present in
+clone/translations.
+
+=== ure ===
+
+The ure repository is relatively small, but has a pretty high rate of change
+(574 changes year to date). There are quite a few items on the ToDo list that
+seem to indicate that ure will sustain a sizeable rate of change in the
+foreseeable future.
+
+So although ure can conceptually be seen as an 'external' library, it seems
+that trying to maintain it as such would, for practical reasons, mean that it
+would remain as an auxiliary git in clone/, again defeating the purpose
+of simplifying the git repo layout.
+Furthermore, there are numerous changes that impact both ure and the rest
+of the code; keeping ure separate would make bisection very hard/slow at best.
+
+But since there is no consensus yet on this, the issue must be addressed
+before we can proceed.
+
+The current version of the onegit.sh script combines ure into core,
+but a decision whether to do that or to keep it separate must be taken
+before the migration.
+
+== onegit.sh ==
+
+The onegit.sh script, found in contrib/dev-tools, implements the steps described above.
+You are all encouraged to try it for yourselves and check that the result is sane.
+
+
+I'm planning to add to this script a few steps at the end to patch core
+so that autogen and the other scripts are adjusted to the changes...
+
+== deployment ==
+
+The migration is intrusive. Technically cherry-picking from the existing layout
+should still be possible, but should be done in a dedicated set-up, because
+pulling the existing git repos into the migrated one to cherry-pick will
+make them grow a lot... it is probably easier to git-format-patch the commits
+to cherry-pick and then apply them as patches...
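+
+Concretely that would look something like this (the commit id is a placeholder):
+
+    # in the old split repo
+    git format-patch -1 <commit-id>
+    # in the migrated repo
+    git am 0001-*.patch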
+
+Still, in order to limit the amount of such patches, it is preferable to do the
+migration when 3.4 has stabilized... so after 3.4.2...
+
+Furthermore, during the actual migration, master on the original repos needs to be
+somehow shut down.
+We need to plan for this, make sure that everybody having commit access is aware
+of the planning, and take appropriate steps to flush what needs to be flushed out
+of their local trees.
+
+The result of the conversion should be a libreoffice/core.git repo and an
+additional libreoffice/binfilter.git.
+
+The build instructions will remain unchanged (except to use core.git instead
+of bootstrap)
+
+
+
diff --git a/archives/onegit/checkgit.sh b/archives/onegit/checkgit.sh
new file mode 100755
index 0000000..76dfc86
--- /dev/null
+++ b/archives/onegit/checkgit.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+SPLIT_GIT=/local/libreoffice/master
+ONE_GIT=/local/libreoffice/dev-tools/onegit/libo
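+
+# Sanity check for the conversion: for every file in the combined repo, compare
+# its full history (log messages plus every blob revision) against the history
+# of the same file in the old split repos. Adjust the two paths above as needed.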
+
+# list all revisions of the file
+function all_revs {
+ FILE="$1"
+
+ # first the log (without commit numbers)
+ git log --reverse --pretty=format:"Author: %an <%ae>%nDate: %ai%nCommitter: %cn <%ce>%nCommit date: %ci%n%n%B" "$FILE"
+
+ # then all the revisions of the file
+ git rev-list --reverse --objects HEAD -- "$FILE" | while read SHA REST ; do
+ TYPE=`git cat-file -t $SHA`
+ if [ "$TYPE" = "blob" ] ; then
+ git cat-file -p $SHA
+ fi
+ done
+}
+
+cd "$ONE_GIT"
+for MODULE in .[^.]* * ; do
+(
+ [ "$MODULE" != ".git" ] && find $MODULE -type f | while read FILE ; do
+ all_revs "$FILE" > /tmp/testrev.$MODULE.onegit
+
+ TRY_FILE=`eval "echo $SPLIT_GIT/clone/*/$FILE"`
+ DIR="$SPLIT_GIT"
+ if [ -f "$TRY_FILE" ] ; then
+ DIR="${TRY_FILE%$FILE}"
+ fi
+ pushd "$DIR" > /dev/null
+ all_revs "$FILE" > /tmp/testrev.$MODULE.splitgit
+ popd > /dev/null
+
+ (
+ echo -n "Trying $FILE ... "
+ if diff -uw /tmp/testrev.$MODULE.splitgit /tmp/testrev.$MODULE.onegit ; then
+ echo "OK"
+ else
+ echo "ERROR: $FILE differs"
+ fi
+ ) > /tmp/testrev.$MODULE.log
+
+ (
+ flock -x 200
+ cat /tmp/testrev.$MODULE.log
+ ) 200>/tmp/testrev.lock
+ done
+) &
+done
+
+wait
diff --git a/archives/onegit/onegit.sh b/archives/onegit/onegit.sh
new file mode 100755
index 0000000..83a3fc0
--- /dev/null
+++ b/archives/onegit/onegit.sh
@@ -0,0 +1,322 @@
+#!/usr/bin/env bash
+
+bin_dir=$(dirname "$0")
+pushd "${bin_dir}" > /dev/null
+bin_dir=$(pwd)
+popd > /dev/null
+
+GIT_BASE=$(pwd)
+GIT_NAME="libo"
+GIT_TEMP=${GIT_BASE}/gittemp
+
+batch="[main]"
+die()
+{
+ echo "*** $(date +'%Y-%m-%d-%H:%M:%S') $batch $@" | tee -a ${GIT_BASE?}/onegit.msgs >&2
+ exit 1
+}
+
+log()
+{
+ echo "=== $(date +'%Y-%m-%d-%H:%M:%S') $batch $@" | tee -a ${GIT_BASE?}/onegit.msgs >&2
+}
+
+usage()
+{
+cat <<EOF
+Usage $0 [options] -g <git_base_url>
+Options:
+ -a Just apply the patches (this need to be after -C and -n if they are specified)
+ -C base directory where to create the onegit repo. the default
+ is the current working directory, i.e '.'
+ -g base part of the url to access the libreoffice repos
+ for example -g "git://anongit.freedesktop.org/libreoffice"
+ or -f "/lo/"
+ if the url given is a local directory we expect it
+ to be the path of bootstrap. the other repos are expected to be in <path>/clone/
+ -n name of the onegit repo to be created. the default is 'libo'
+ -t temp directory (default the one given by -C + /gittemp). we need a few GB
+EOF
+
+}
+
+merge_generic()
+{
+local r="$1"
+
+(
+ flock -x 200
+ pushd ${GIT_BASE?}/${GIT_NAME?} > /dev/null
+
+ log "merge $r into onegit"
+ git remote add $r "${GIT_TEMP?}/$r" || die "Error adding remote ${GIT_TEMP?}/$r"
+ git fetch $r || die "Error fetching $r"
+ git merge -Xours $r/master || die "Error merging $r/master"
+ git remote rm $r || die "Error removing remote $r/master"
+
+ popd > /dev/null # GIT_BASE/GIT_NAME
+) 200> /tmp/ongit.lockfile
+ log "done merging $r"
+}
+
+process_generic()
+{
+local r=$1
+shift
+local s=$1
+shift
+local extra=$@
+
+ pushd ${GIT_TEMP?} > /dev/null || die "Error cd-ing to ${GIT_TEMP?}"
+
+ log "fast-import of $r"
+ mkdir $r
+ pushd $r > /dev/null || die "Error cd-ing to $r"
+ git init
+ (cd "${SOURCE_GIT_BASE?}/$s" && git fast-export --signed-tag=strip --branches --tags ) | lo_git_rewrite --prefix "$r:" $extra | git fast-import
+ git reset --hard > /dev/null
+ for oldtag in $(git tag) ; do git tag "${r}_${oldtag}" "$oldtag" ; git tag -d "${oldtag}" > /dev/null ; done
+
+ log "git gc of $r"
+ git gc --prune=now --aggressive || die "Error during git-gc of $r"
+ popd > /dev/null # $r
+
+ popd > /dev/null # GIT_TEMP
+ log "Done generic processing for $r"
+}
+
+merge_bootstrap()
+{
+ pushd ${GIT_BASE?} > /dev/null
+ log "clone bootstrap to onegit"
+ git clone "${GIT_TEMP?}/bootstrap" ${GIT_NAME?} || die "Error cloning ${GIT_TEMP?}/bootstrap"
+
+ cp -r "${SOURCE_GIT_BASE?}/src" "${GIT_NAME?}/."
+
+ pushd ${GIT_NAME?} > /dev/null || die "Error cd-ing to $(pwd)/${GIT_NAME?}"
+ mkdir clone || die "Error creating $(pwd)/clone directory"
+ popd > /dev/null # GIT_NAME
+
+ popd > /dev/null # GIT_BASE
+ log "Done merging bootstrap"
+}
+
+process_batch1()
+{
+ batch="[batch1]"
+
+ process_generic bootstrap "."
+ merge_bootstrap
+
+ process_generic ure clone/ure
+ merge_generic ure
+
+ process_generic calc clone/calc
+ merge_generic calc
+
+ process_generic sdk clone/sdk
+ merge_generic sdk
+
+ process_generic extras clone/extras
+ merge_generic extras
+
+ process_generic impress clone/impress --exclude-suffix "/wntmsci10"
+ merge_generic impress
+
+ process_generic artwork clone/artwork --exclude-module external_images
+ merge_generic artwork
+
+ process_generic extensions clone/extensions --buffer-size 80
+ merge_generic extensions
+
+ # deal with still separate repos, purely untouched like help or translations
+ pushd ${GIT_BASE?}/${GIT_NAME?}/clone > /dev/null || die "Error cd.ing to ${GIT_BASE}/${GIT_NAME}/clone from $(pwd)"
+ log "clone help"
+ git clone "${SOURCE_GIT_BASE?}/clone/help" help || die "Error cloning ${SOURCE_GIT_BASE?}/clone/help"
+ log "Done cloning help"
+
+ log "clone transations"
+ git clone "${SOURCE_GIT_BASE?}/clone/translations" translations || die "Error cloning ${SOURCE_GIT_BASE?}/clone/translations"
+ log "Done cloning translations"
+ popd > /dev/null # GIT_BASE/GIT_NAME/clone
+
+
+ log "Done processing batch1"
+}
+
+process_batch2()
+{
+ batch="[batch2]"
+
+ process_generic writer clone/writer
+ merge_generic writer
+
+ process_generic base clone/base
+ merge_generic base
+
+ process_generic filters clone/filters --exclude-module binfilter
+ merge_generic filters
+
+ process_generic binfilter clone/filters --filter-module binfilter
+ log "clone binfilter"
+ pushd ${GIT_BASE}/${GIT_NAME?}/clone > /dev/null || die "Error cd-ing to ${GIT_NAME}/clone"
+ git clone "${GIT_TEMP?}/binfilter" binfilter || die "Error cloning ${GIT_TEMP?}/binfilter"
+ popd > /dev/null # GIT_BASE/GIT_NAME/clone
+ log "Done merging binfilter"
+
+ log "Done processing batch2"
+}
+
+process_batch3()
+{
+ batch="[batch3]"
+
+ process_generic libs-gui clone/libs-gui --exclude-suffix "/wntmsci10"
+ merge_generic libs-gui
+
+ process_generic components clone/components
+ merge_generic components
+
+ process_generic testing clone/testing
+ merge_generic testing
+
+ process_generic libs-extern-sys clone/libs-extern-sys --exclude-module dictionaries --exclude-download --buffer-size 80
+ merge_generic libs-extern-sys
+
+ process_generic libs-extern clone/libs-extern --exclude-download
+ merge_generic libs-extern
+
+ process_generic dictionaries clone/libs-extern-sys --filter-module dictionaries --buffer-size 80
+
+ pushd ${GIT_BASE?}/${GIT_NAME?}/clone > /dev/null || die "Error cd.ing to ${GIT_BASE}/${GIT_NAME}/clone from $(pwd)"
+ log "clone dictionaries"
+ git clone "${GIT_TEMP?}/dictionaries" dictionaries || die "Error cloning ${GIT_TEMP?}/dictionaries"
+ log "Done cloning dictionnaries"
+ popd > /dev/null # GIT_BASE/GIT_NAME/clone
+
+
+
+ log "Done processing batch3"
+}
+
+process_batch4()
+{
+ batch="[batch4]"
+
+ process_generic libs-core clone/libs-core --exclude-suffix "/wntmsci10"
+ merge_generic libs-core
+
+ process_generic postprocess clone/postprocess
+ merge_generic postprocess
+
+ log "Done processing batch4"
+}
+
+apply_patches()
+{
+ pushd ${GIT_BASE?}/${GIT_NAME?} > /dev/null || die "Error cd-ing to ${GIT_BASE}/${GIT_NAME}"
+ for p in $(ls -1 ${bin_dir}/patches) ; do
+ log "Apply patch $p"
+ (cat ${bin_dir}/patches/$p | git am -k ) || die "Error applying the patch"
+ done
+ popd > /dev/null
+}
+
+##### main
+
+while getopts aC:g:hn:t: opt ; do
+ case "$opt" in
+    a) apply_patches; exit ;;
+ C) GIT_BASE="$OPTARG" ;;
+ g) SOURCE_GIT_BASE="$OPTARG" ;;
+ h) usage; exit ;;
+ n) GIT_NAME="$OPTARG" ;;
+ t) GIT_TEMP="$OPTARG" ;;
+ esac
+done
+
+# make sure we have a directory to work in (our new git repos will be created there,
+# as well as our workdir for temporary repos)
+if [ ! -d ${GIT_BASE?} ] ; then
+ die "$GIT_BASE is not a directory, please create it before using it"
+fi
+cat /dev/null > ${GIT_BASE?}/onegit.msgs
+
+
+# make sure we have a location for the source repos
+if [ -z "$SOURCE_GIT_BASE" ] ; then
+ die "Missing -g arguement. use -h for help"
+fi
+if [ ! -d "${SOURCE_GIT_BASE?}" ] ; then
+ die "$SOURCE_GIT_BASE is not a directory"
+fi
+
+# preferably our target core repo does not exist already
+if [ -e "${GIT_BASE?}/${GIT_NAME?}" ] ; then
+ die "$GIT_BASE/$GIT_NAME already exist, cannot create a git repo there"
+fi
+
+#check that lo_git_rewrite is built
+if [ ! -x "${bin_dir?}/../lo_git_rewrite/lo_git_rewrite" ] ; then
+ die "${bin_dir?}/../lo_git_rewrite/lo_git_rewrite need to be build"
+else
+ export PATH="$PATH:${bin_dir?}/../lo_git_rewrite/"
+fi
+
+if [ ! -d "${GIT_TEMP?}" ] ; then
+ log "create a temporary workarea ${GIT_TEMP?}"
+ mkdir "${GIT_TEMP?}" || die "Error creating directory ${GIT_TEMP?}"
+fi
+
+log "Start OneGit conversion"
+
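+# The four batches are independent and run as parallel subshells; each one
+# fast-imports and rewrites its repos under ${GIT_TEMP}, and merges into
+# ${GIT_BASE}/${GIT_NAME} are serialized by the flock in merge_generic.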
+(process_batch1)&
+p_batch1=$!
+
+(process_batch2)&
+p_batch2=$!
+
+(process_batch3)&
+p_batch3=$!
+
+(process_batch4)&
+p_batch4=$!
+
+result=0
+wait $p_batch1 || result=1
+wait $p_batch2 || result=1
+wait $p_batch3 || result=1
+wait $p_batch4 || result=1
+
+if [ $result -ne 0 ] ; then
+ exit $result
+fi
+
+log "Tag new repos"
+
+pushd ${GIT_BASE?}/${GIT_NAME?} > /dev/null || die "Error cd-int to ${GIT_BASE}/${GIT_NAME} to tag"
+git tag -m "OneGit script applied" MELD_LIBREOFFICE_REPOS || die "Error applying tag on core"
+
+pushd clone/translations > /dev/null
+git tag -m "OneGit script applied" MELD_LIBREOFFICE_REPOS || die "Error applying tag on translations"
+popd > /dev/null # clone/translation
+
+pushd clone/binfilter > /dev/null
+git tag -m "OneGit script applied" MELD_LIBREOFFICE_REPOS || die "Error applying tag on binfilter"
+popd > /dev/null # clone/binfilter
+
+pushd clone/help > /dev/null
+git tag -m "OneGit script applied" MELD_LIBREOFFICE_REPOS || die "Error applying tag on help"
+popd > /dev/null # clone/help
+
+pushd clone/dictionaries > /dev/null
+git tag -m "OneGit script applied" MELD_LIBREOFFICE_REPOS || die "Error applying tag on help"
+popd > /dev/null # clone/dictionaries
+
+log "Apply patches"
+apply_patches
+
+popd > /dev/null # GIT_BASE/GIT_NAME
+
+log "OneGit conversion All Done."
+
diff --git a/archives/onegit/patches/0001-adjust-.gitignore-for-new-combin.patch b/archives/onegit/patches/0001-adjust-.gitignore-for-new-combin.patch
new file mode 100644
index 0000000..35ae43a
--- /dev/null
+++ b/archives/onegit/patches/0001-adjust-.gitignore-for-new-combin.patch
@@ -0,0 +1,234 @@
+From 0236fb840253d9ab84b8c65004d97dfc7e6e36e2 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Sat, 11 Jun 2011 16:21:51 -0500
+Subject: adjust .gitignore for new combined repo
+
+---
+ .gitignore | 210 +-----------------------------------------------------------
+ 1 files changed, 1 insertions(+), 209 deletions(-)
+
+diff --git a/.gitignore b/.gitignore
+index 575eb3c..ee9f91d 100644
+--- a/.gitignore
++++ b/.gitignore
+@@ -71,216 +71,8 @@
+ /*/*.exe
+
+ # links to the other repositories
+-/MathMLDTD
+-/Mesa
+-/UnoControls
+-/agg
+-/accessibility
+-/afms
+-/animations
+-/apache-commons
+-/apple_remote
+-/autodoc
+-/automation
+-/avmedia
+-/basctl
+-/basebmp
+-/basegfx
+-/basic
+-/bean
+-/beanshell
+-/berkeleydb
+-/binaryurp
+ /binfilter
+-/boost
+-/bridges
+-/cairo
+-/canvas
+-/chart2
+-/cli_ure
+-/codemaker
+-/comphelper
+-/configmgr
+-/connectivity
+-/cosv
+-/cppcanvas
+-/cppu
+-/cppuhelper
+-/cppunit
+-/cpputools
+-/crashrep
+-/ct2n
+-/cui
+-/curl
+-/dbaccess
+-/default_images
+-/desktop
+ /dictionaries
+-/drawinglayer
+-/dtrans
+-/editeng
+-/embeddedobj
+-/embedserv
+-/epm
+-/eventattacher
+-/expat
+-/extensions
+-/external
+-/external_images
+-/extras
+-/fileaccess
+-/filter
+-/forms
+-/formula
+-/fpicker
+-/framework
+-/gdk-pixbuf
+-/gettext
+-/glib
+-/graphite
+ /helpcontent2
+-/hsqldb
+-/hunspell
+-/hwpfilter
+-/hyphen
+-/i18npool
+-/i18nutil
+-/icc
+-/icu
+-/idl
+-/idlc
+-/io
+-/javainstaller2
+-/javaunohelper
+-/jfreereport
+-/jpeg
+-/jurt
+-/jvmaccess
+-/jvmfwk
+-/l10n
+-/l10ntools
+-/languagetool
+-/libcroco
+-/libegg
+-/libgsf
+-/libpng
+-/librsvg
+-/libtextcat
+-/libvisio
+-/libwpd
+-/libwpg
+-/libwps
+-/libxml2
+-/libxmlsec
+-/libxslt
+-/lingucomponent
+-/linguistic
+-/lotuswordpro
+-/lpsolve
+-/lucene
+-/mdds
+-/migrationanalysis
+-/more_fonts
+-/moz
+-/mysqlc
+-/mysqlcppconn
+-/mythes
+-/neon
+-/nlpsolver
+-/np_sdk
+-/nss
+-/o3tl
+-/odk
+-/offapi
+-/officecfg
+-/offuh
+-/ooo_custom_images
+-/oovbaapi
+-/oox
+-/openssl
+-/package
+-/packimages
+-/padmin
+-/pango
+-/postprocess
+-/psprint_config
+-/python
+-/pyuno
+-/qadevOOo
+-/rdbmaker
+-/readlicense_oo
+-/redland
+-/regexp
+-/registry
+-/remotebridges
+-/reportbuilder
+-/reportdesign
+-/rhino
+-/ridljar
+-/rsc
+-/sal
+-/salhelper
+-/sane
+-/sax
+-/saxon
+-/sc
+-/scaddins
+-/sccomp
+-/scripting
+-/sd
+-/sdext
+-/setup_native
+-/sfx2
+-/shell
+-/slideshow
+-/smoketestdoc
+-/smoketestoo_native
+-/sot
+-/starmath
+-/stax
+-/stoc
+-/store
+-/svl
+-/svtools
+-/svx
+-/sw
+-/swext
+-/sysui
+-/test
+-/testautomation
+-/testgraphical
+-/testtools
+-/tomcat
+-/toolkit
+-/tools
+-/translate_toolkit
+ /translations
+-/twain
+-/ucb
+-/ucbhelper
+-/udkapi
+-/udm
+-/unixODBC
+-/unodevtools
+-/unoil
+-/unotools
+-/unoxml
+-/ure
+-/uui
+-/vbahelper
+-/vcl
+-/vigra
+-/wizards
+-/writerfilter
+-/writerperfect
+-/x11_extensions
+-/xmerge
+-/xml2cmp
+-/xmlhelp
+-/xmloff
+-/xmlreader
+-/xmlscript
+-/xmlsecurity
+-/xpdf
+-/xsltml
+-/zlib
++
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0002-adjust-the-list-of-external-git-.patch b/archives/onegit/patches/0002-adjust-the-list-of-external-git-.patch
new file mode 100644
index 0000000..441413b
--- /dev/null
+++ b/archives/onegit/patches/0002-adjust-the-list-of-external-git-.patch
@@ -0,0 +1,25 @@
+From 36cc551b4b5ff871fa24ddd9756ab4b373cac49f Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Mon, 13 Jun 2011 13:56:41 -0500
+Subject: adjust the list of external git repos for onegit
+
+---
+ configure.in | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index 9ad2f52..d400124 100755
+--- a/configure.in
++++ b/configure.in
+@@ -8544,7 +8544,7 @@ AC_SUBST(OOO_JUNIT_JAR)
+ dnl ===================================================================
+ dnl Dealing with l10n options
+ dnl ===================================================================
+-GIT_REPO_NAMES="artwork base calc components extensions extras filters help impress libs-core libs-extern libs-extern-sys libs-gui postprocess sdk testing ure writer"
++GIT_REPO_NAMES="help"
+ AC_MSG_CHECKING([which languages to be built])
+ # get list of all languages
+ # generate shell variable from completelangiso= from solenv/inc/langlist.mk
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0003-remove-clone-calc-reference-in-lotuswordpro-qa.patch b/archives/onegit/patches/0003-remove-clone-calc-reference-in-lotuswordpro-qa.patch
new file mode 100644
index 0000000..097a62f
--- /dev/null
+++ b/archives/onegit/patches/0003-remove-clone-calc-reference-in-lotuswordpro-qa.patch
@@ -0,0 +1,27 @@
+From be518200dbbab30b530a958abdcc1f04cc951611 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 08:51:07 -0500
+Subject: remove /clone/calc reference in lotuswordpro qa
+
+---
+ lotuswordpro/qa/cppunit/test_lotuswordpro.cxx | 4 ++--
+ 1 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/lotuswordpro/qa/cppunit/test_lotuswordpro.cxx b/lotuswordpro/qa/cppunit/test_lotuswordpro.cxx
+index 2766253..12bc0f5 100644
+--- a/lotuswordpro/qa/cppunit/test_lotuswordpro.cxx
++++ b/lotuswordpro/qa/cppunit/test_lotuswordpro.cxx
+@@ -147,8 +147,8 @@ namespace
+
+ void LotusWordProTest::test()
+ {
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/lotuswordpro/qa/cppunit/data/pass")), true);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/lotuswordpro/qa/cppunit/data/fail/")), false);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/lotuswordpro/qa/cppunit/data/pass")), true);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/lotuswordpro/qa/cppunit/data/fail/")), false);
+
+ printf("LotusWordPro: tested %d files\n", m_nLoadedDocs);
+ }
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0004-remove-clone-calc-references-in-filters-test.cxx.patch b/archives/onegit/patches/0004-remove-clone-calc-references-in-filters-test.cxx.patch
new file mode 100644
index 0000000..585423d
--- /dev/null
+++ b/archives/onegit/patches/0004-remove-clone-calc-references-in-filters-test.cxx.patch
@@ -0,0 +1,48 @@
+From 498bd2e88e35197b6dae50efcea993073d8119f9 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 08:51:36 -0500
+Subject: remove clone/calc references in filters-test.cxx
+
+---
+ sc/qa/unit/filters-test.cxx | 12 ++++++------
+ 1 files changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/sc/qa/unit/filters-test.cxx b/sc/qa/unit/filters-test.cxx
+index 288249c..09cb107 100644
+--- a/sc/qa/unit/filters-test.cxx
++++ b/sc/qa/unit/filters-test.cxx
+@@ -157,25 +157,25 @@ void FiltersTest::recursiveScan(const rtl::OUString &rFilter, const rtl::OUStrin
+ void FiltersTest::testCVEs()
+ {
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Quattro Pro 6.0")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/qpro/pass")), rtl::OUString(), true);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/qpro/pass")), rtl::OUString(), true);
+
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Quattro Pro 6.0")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/qpro/fail")), rtl::OUString(), false);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/qpro/fail")), rtl::OUString(), false);
+
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Quattro Pro 6.0")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/qpro/indeterminate")), rtl::OUString(), indeterminate);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/qpro/indeterminate")), rtl::OUString(), indeterminate);
+
+ //warning, the current "sylk filter" in sc (docsh.cxx) automatically
+ //chains on failure on trying as csv, rtf, etc. so "success" may
+ //not indicate that it imported as .slk.
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("SYLK")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/slk/pass")), rtl::OUString(), true);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/slk/pass")), rtl::OUString(), true);
+
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("SYLK")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/slk/fail")), rtl::OUString(), false);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/slk/fail")), rtl::OUString(), false);
+
+ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("SYLK")),
+- m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/calc/sc/qa/unit/data/slk/indeterminate")), rtl::OUString(), indeterminate);
++ m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sc/qa/unit/data/slk/indeterminate")), rtl::OUString(), indeterminate);
+
+ }
+
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0005-move-GIT_REPO_NAMES-init-up.patch b/archives/onegit/patches/0005-move-GIT_REPO_NAMES-init-up.patch
new file mode 100644
index 0000000..e86fc04
--- /dev/null
+++ b/archives/onegit/patches/0005-move-GIT_REPO_NAMES-init-up.patch
@@ -0,0 +1,32 @@
+From d7a2f4e2210d665cd283dc836785187ecce5563a Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 08:54:13 -0500
+Subject: move GIT_REPO_NAMES init up
+
+---
+ configure.in | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index d400124..a6f2262 100755
+--- a/configure.in
++++ b/configure.in
+@@ -1276,6 +1276,7 @@ AC_ARG_WITH(max-jobs,
+
+ BUILD_TYPE="LibO"
+ SCPDEFS=""
++GIT_REPO_NAMES="help"
+
+ echo "********************************************************************"
+ echo "*"
+@@ -8544,7 +8545,6 @@ AC_SUBST(OOO_JUNIT_JAR)
+ dnl ===================================================================
+ dnl Dealing with l10n options
+ dnl ===================================================================
+-GIT_REPO_NAMES="help"
+ AC_MSG_CHECKING([which languages to be built])
+ # get list of all languages
+ # generate shell variable from completelangiso= from solenv/inc/langlist.mk
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0006-binfilter-is-now-a-separate-repo.patch b/archives/onegit/patches/0006-binfilter-is-now-a-separate-repo.patch
new file mode 100644
index 0000000..49b3f32
--- /dev/null
+++ b/archives/onegit/patches/0006-binfilter-is-now-a-separate-repo.patch
@@ -0,0 +1,24 @@
+From 29406ecd7c950d407c52ed009df66b4bbb964aec Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 08:54:33 -0500
+Subject: binfilter is now a separate repo
+
+---
+ configure.in | 1 +
+ 1 files changed, 1 insertions(+), 0 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index a6f2262..8a1d319 100755
+--- a/configure.in
++++ b/configure.in
+@@ -1993,6 +1993,7 @@ if test "$enable_binfilter" = "no"; then
+ else
+ WITH_BINFILTER="YES"
+ BUILD_TYPE="$BUILD_TYPE BINFILTER"
++ GIT_REPO_NAMES="$GIT_REPO_NAMES binfilter"
+ AC_MSG_RESULT([yes])
+ fi
+ AC_SUBST(WITH_BINFILTER)
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0007-dictionaries-is-now-in-a-separate-optional-repo.patch b/archives/onegit/patches/0007-dictionaries-is-now-in-a-separate-optional-repo.patch
new file mode 100644
index 0000000..132d674
--- /dev/null
+++ b/archives/onegit/patches/0007-dictionaries-is-now-in-a-separate-optional-repo.patch
@@ -0,0 +1,24 @@
+From 26e04d25df093c740a7ef6dbd9db5cef265c2d1e Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 08:57:49 -0500
+Subject: dictionaries is now in a separate optional repo
+
+---
+ configure.in | 1 +
+ 1 files changed, 1 insertions(+), 0 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index 8a1d319..b6ca892 100755
+--- a/configure.in
++++ b/configure.in
+@@ -2028,6 +2028,7 @@ if test -z "$with_myspell_dicts" || test "$with_myspell_dicts" = "yes"; then
+ AC_MSG_RESULT([yes])
+ WITH_MYSPELL_DICTS=YES
+ BUILD_TYPE="$BUILD_TYPE DICTIONARIES"
++ GIT_REPO_NAMES="$GIT_REPO_NAMES dictionaries"
+ else
+ AC_MSG_RESULT([no])
+ WITH_MYSPELL_DICTS=NO
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0008-bootstrap-is-now-core-in-.-g.patch b/archives/onegit/patches/0008-bootstrap-is-now-core-in-.-g.patch
new file mode 100644
index 0000000..c1ea72e
--- /dev/null
+++ b/archives/onegit/patches/0008-bootstrap-is-now-core-in-.-g.patch
@@ -0,0 +1,43 @@
+From a8013507a33f8e6e098539e19486b084fd33beed Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 10:18:13 -0500
+Subject: bootstrap is now core in ./g
+
+---
+ g | 6 +++---
+ 1 files changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/g b/g
+index bf6d887..0e4165e 100755
+--- a/g
++++ b/g
+@@ -145,7 +145,7 @@ while shift ; do
+ done
+
+ # do it!
+-DIRS="bootstrap $(cd $CLONEDIR ; ls)"
++DIRS="core $(cd $CLONEDIR ; ls)"
+ if [ "$COMMAND" = "clone" ] ; then
+ DIRS=$(cat "$RAWBUILDDIR/bin/repo-list")
+ # update hooks in the main repo too
+@@ -155,7 +155,7 @@ for REPO in $DIRS ; do
+ DIR="$CLONEDIR/$REPO"
+ NAME="$REPO"
+ HOOKDIR="../../../../git-hooks"
+- if [ "$REPO" = "bootstrap" ] ; then
++ if [ "$REPO" = "core" ] ; then
+ DIR="$RAWBUILDDIR"
+ NAME="main repo"
+ HOOKDIR="../../git-hooks"
+@@ -228,7 +228,7 @@ for REPO in $DIRS ; do
+ ;;
+ clone)
+ EXTRA="$(git config remote.origin.url)"
+- EXTRA=${EXTRA/bootstrap/${REPO}}
++ EXTRA=${EXTRA/core/${REPO}}
+ ;;
+ esac
+
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0009-do-not-use-clone-in-paht-for-test-in-hwpfilter.patch b/archives/onegit/patches/0009-do-not-use-clone-in-paht-for-test-in-hwpfilter.patch
new file mode 100644
index 0000000..91f6adc
--- /dev/null
+++ b/archives/onegit/patches/0009-do-not-use-clone-in-paht-for-test-in-hwpfilter.patch
@@ -0,0 +1,27 @@
+From 1fcc3efbcabdbc8663c4295d1d9a9393bb5fc444 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 21 Jul 2011 10:18:38 -0500
+Subject: do not use /clone in path for test in hwpfilter
+
+---
+ hwpfilter/qa/cppunit/test_hwpfilter.cxx | 4 ++--
+ 1 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/hwpfilter/qa/cppunit/test_hwpfilter.cxx b/hwpfilter/qa/cppunit/test_hwpfilter.cxx
+index 56c887f..460db0f 100644
+--- a/hwpfilter/qa/cppunit/test_hwpfilter.cxx
++++ b/hwpfilter/qa/cppunit/test_hwpfilter.cxx
+@@ -157,8 +157,8 @@ namespace
+
+ void HwpFilterTest::test()
+ {
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/hwpfilter/qa/cppunit/data/pass")), true);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/hwpfilter/qa/cppunit/data/fail/")), false);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/hwpfilter/qa/cppunit/data/pass")), true);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/hwpfilter/qa/cppunit/data/fail/")), false);
+
+ printf("HwpFilter: tested %d files\n", m_nLoadedDocs);
+ }
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0010-do-not-use-clone-in-path-for-test-in-sw.patch b/archives/onegit/patches/0010-do-not-use-clone-in-path-for-test-in-sw.patch
new file mode 100644
index 0000000..36832c4
--- /dev/null
+++ b/archives/onegit/patches/0010-do-not-use-clone-in-path-for-test-in-sw.patch
@@ -0,0 +1,46 @@
+From a99c9b619cb96b9331c2b072f02231779f134935 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Mon, 1 Aug 2011 15:56:09 -0500
+Subject: [PATCH] do not use clone in path for test in sw
+
+---
+ sw/qa/core/filters-test.cxx | 16 ++++++++--------
+ 1 files changed, 8 insertions(+), 8 deletions(-)
+
+diff --git a/sw/qa/core/filters-test.cxx b/sw/qa/core/filters-test.cxx
+index 71e5d74..e9d2e4a 100644
+--- a/sw/qa/core/filters-test.cxx
++++ b/sw/qa/core/filters-test.cxx
+@@ -167,21 +167,21 @@ void FiltersTest::recursiveScan(const rtl::OUString &rFilter, const rtl::OUStrin
+
+ void FiltersTest::testCVEs()
+ {
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Staroffice XML (Writer)")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/xml/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CXML")), true);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Staroffice XML (Writer)")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/xml/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CXML")), true);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Staroffice XML (Writer)")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/xml/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CXML")), false);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Staroffice XML (Writer)")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/xml/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CXML")), false);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/rtf/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), true);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/rtf/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), true);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/rtf/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), false);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/rtf/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), false);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/rtf/indeterminate")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), indeterminate);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Rich Text Format")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/rtf/indeterminate")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("RTF")), indeterminate);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/ww8/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), true);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/ww8/pass")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), true);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/ww8/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), false);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/ww8/fail")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), false);
+
+- recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/writer/sw/qa/core/data/ww8/indeterminate")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), indeterminate);
++ recursiveScan(rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("MS Word 97")), m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/sw/qa/core/data/ww8/indeterminate")), rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("CWW8")), indeterminate);
+
+ printf("Writer: tested %d files\n", m_nLoadedDocs);
+ }
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0011-another-round-of-hard-coded-clone-whack-a-mole-in-sv.patch b/archives/onegit/patches/0011-another-round-of-hard-coded-clone-whack-a-mole-in-sv.patch
new file mode 100644
index 0000000..c67a300
--- /dev/null
+++ b/archives/onegit/patches/0011-another-round-of-hard-coded-clone-whack-a-mole-in-sv.patch
@@ -0,0 +1,36 @@
+From 36c59f42fcb363d810449ed0c96beac0af992e6e Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Mon, 1 Aug 2011 18:27:06 -0500
+Subject: [PATCH] another round of hard-coded /clone whack-a-mole in svtools this time.
+
+---
+ svtools/qa/cppunit/filters-test.cxx | 12 ++++++------
+ 1 files changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/svtools/qa/cppunit/filters-test.cxx b/svtools/qa/cppunit/filters-test.cxx
+index c5d43af..490f8d5 100644
+--- a/svtools/qa/cppunit/filters-test.cxx
++++ b/svtools/qa/cppunit/filters-test.cxx
+@@ -134,13 +134,13 @@ void FiltersTest::recursiveScan(const rtl::OUString &rURL, int nExpected)
+
+ void FiltersTest::testCVEs()
+ {
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/wmf/pass")), true);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/wmf/fail")), false);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/wmf/indeterminate")), 2);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/wmf/pass")), true);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/wmf/fail")), false);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/wmf/indeterminate")), 2);
+
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/emf/pass")), true);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/emf/fail")), false);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/libs-gui/svtools/qa/cppunit/data/emf/indeterminate")), 2);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/emf/pass")), true);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/emf/fail")), false);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/svtools/qa/cppunit/data/emf/indeterminate")), 2);
+ }
+
+ FiltersTest::FiltersTest()
+--
+1.7.3.4
+
diff --git a/archives/onegit/patches/0012-do-not-use-hard-code-clone-in-writerfilter-tests.patch b/archives/onegit/patches/0012-do-not-use-hard-code-clone-in-writerfilter-tests.patch
new file mode 100644
index 0000000..123d287
--- /dev/null
+++ b/archives/onegit/patches/0012-do-not-use-hard-code-clone-in-writerfilter-tests.patch
@@ -0,0 +1,27 @@
+From a1909e7a1ec58fca5d151c1630877afe96896232 Mon Sep 17 00:00:00 2001
+From: Norbert Thiebaud <nthiebaud@gmail.com>
+Date: Thu, 4 Aug 2011 20:57:49 -0500
+Subject: [PATCH] do not use hard-coded /clone/ in writerfilter tests
+
+---
+ writerfilter/qa/cppunittests/rtftok/testrtftok.cxx | 4 ++--
+ 1 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/writerfilter/qa/cppunittests/rtftok/testrtftok.cxx b/writerfilter/qa/cppunittests/rtftok/testrtftok.cxx
+index 7ed42dc..fbff05f 100644
+--- a/writerfilter/qa/cppunittests/rtftok/testrtftok.cxx
++++ b/writerfilter/qa/cppunittests/rtftok/testrtftok.cxx
+@@ -160,8 +160,8 @@ void RtfTest::recursiveScan(const rtl::OUString &rURL, bool bExpected)
+
+ void RtfTest::test()
+ {
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/writerfilter/qa/cppunittests/rtftok/data/pass")), true);
+- recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/clone/filters/writerfilter/qa/cppunittests/rtftok/data/fail")), false);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/writerfilter/qa/cppunittests/rtftok/data/pass")), true);
++ recursiveScan(m_aSrcRoot + rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("/writerfilter/qa/cppunittests/rtftok/data/fail")), false);
+
+ printf("Rtf: tested %d files\n", m_nLoadedDocs);
+ }
+--
+1.7.3.4
+