WrongExpectedVersionException on EventStore Getting started project - eventstoredb

I have rewritten the getting-started-with-event-store project to learn what's going on, and now the test CanSaveExistingAggregate() fails with a WrongExpectedVersionException. To work out what's happening I would like to know what the expected version should be; how can I find this out? In the test, the line repository.Save(firstSaved, Guid.NewGuid(), d => { }); calculates the expected version as 101, and this is where it fails:
[Test]
public void CanSaveExistingAggregate()
{
    var savedId = SaveTestAggregateWithoutCustomHeaders(repository, 100 /* excludes TestAggregateCreated */);
    var firstSaved = repository.GetById<TestAggregate>(savedId);
    Console.WriteLine("version:" + firstSaved.Id);

    firstSaved.ProduceEvents(50);
    repository.Save(firstSaved, Guid.NewGuid(), d => { });

    var secondSaved = repository.GetById<TestAggregate>(savedId);
    Assert.AreEqual(150, secondSaved.AppliedEventCount);
}
And the code where the exception is thrown:
public void Save(CommonDomain.IAggregate aggregate, Guid commitId, Action<IDictionary<string, object>> updateHeaders)
{
    var commitHeaders = new Dictionary<string, object>
    {
        {CommitIdHeader, commitId},
        {AggregateClrTypeHeader, aggregate.GetType().AssemblyQualifiedName}
    };
    updateHeaders(commitHeaders);

    var streamName = aggregateIdToStreamName(aggregate.GetType(), aggregate.Id);
    var newEvents = aggregate.GetUncommittedEvents().Cast<object>().ToList();
    var originalVersion = aggregate.Version - newEvents.Count;
    var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion;
    var eventsToSave = newEvents.Select(e => ToEventData(Guid.NewGuid(), e, commitHeaders)).ToList();

    if (eventsToSave.Count < WritePageSize)
    {
        eventStoreConnection.AppendToStream(streamName, expectedVersion, eventsToSave);
    }
    else
    {
        var transaction = eventStoreConnection.StartTransaction(streamName, expectedVersion);

        var position = 0;
        while (position < eventsToSave.Count)
        {
            var pageEvents = eventsToSave.Skip(position).Take(WritePageSize);
            transaction.Write(pageEvents);
            position += WritePageSize;
        }

        transaction.Commit();
    }

    aggregate.ClearUncommittedEvents();
}
All the other tests pass (except ThrowsOnGetDeletedAggregate(), but I'll ask about that later), and I think this is the only test where expectedVersion ends up as something other than ExpectedVersion.NoStream.

It turns out it was just a mistake when writing the code. Rather than
var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion;
it should be
var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion - 1;
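To spell out the off-by-one: EventStore's stream versions are zero-based (the first event in a stream is event number 0), while the aggregate's Version counts applied events, so after 101 events the aggregate reports Version 101 but the last event in the stream is number 100. Below is a minimal sketch of the corrected calculation, plus one way to see what the stream is actually at; the diagnostic call is illustrative and assumes the synchronous ClientAPI used in the repository above.

// Aggregate.Version counts applied events; stream event numbers start at 0,
// so the last persisted event sits at originalVersion - 1.
var originalVersion = aggregate.Version - newEvents.Count;
var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion - 1;

// Illustrative check of the stream's actual last event number (old synchronous ClientAPI):
var slice = eventStoreConnection.ReadStreamEventsBackward(streamName, StreamPosition.End, 1, false);
Console.WriteLine("Last event number in {0}: {1}", streamName, slice.LastEventNumber);

Comparing that last event number with the expectedVersion your repository computes is a quick way to confirm which side of the append is off.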

Related

Dart: Class members seem to be wrong when accessed from a separate class

Does anyone know if this is a Dart bug, or is it my misunderstanding of how Dart works?
I am learning Dart to investigate the feasibility of eventually using Flutter; however, while exploring the language, I found some weird behavior (maybe a bug). I tried reproducing it by writing a similar pattern of code, but have yet to figure out what causes it. In the attached code, I wrote a quicksort class that counts the number of times the "sort" method recurses and saves the count in a class member called "recurseCount".
From main(), if I use the QuickSort class directly, I have no issue getting back the recurseCount member; however, if I call it from a different class (called "Tester"), I do not get the correct value for "recurseCount". Why would calling a class from a separate class cause its members to not provide the correct values?
import 'package:test/test.dart';
import 'dart:math' as _math;
// ***********************
enum SortOrder { ascending, descending, unsorted }
class QuickSort {
List list = [];
SortOrder sortOrder = SortOrder.unsorted;
int recurseCount = 0;
QuickSort({this.list}) {
if (list != null && list.length > 1) {
list = sort(useRandomPivot: true);
}
}
List sort(
{List iList,
int leftIndex = 0,
int rightIndex,
bool useRandomPivot = true}) {
if (iList != null) list = iList;
if (list.isEmpty) return [];
rightIndex ??= list.length - 1;
if (rightIndex > list.length - 1) rightIndex = list.length - 1;
if (leftIndex < rightIndex) {
recurseCount++;
var partitionIndex =
_partition(leftIndex, rightIndex, useRandomPivot: useRandomPivot);
if (partitionIndex == -1) {
//already sorted List
if (sortOrder == SortOrder.ascending) {
return list;
} else {
//SortOrder.descending
list = list.reversed.toList(); // Time Complexity of O(n)
sortOrder = SortOrder.ascending;
return list;
}
} else {
sort(leftIndex: leftIndex, rightIndex: partitionIndex - 1);
sort(leftIndex: partitionIndex + 1, rightIndex: rightIndex);
}
} else {
sortOrder = SortOrder.ascending;
}
return list;
}
int _partition(int leftIndex, int rightIndex, {bool useRandomPivot = true}) {
// in case the array is already sorted from the start; only run through the partition'ing one time
// note: regardless of ascending or descending order
if (leftIndex == 0 && rightIndex >= list.length - 1) {
sortOrder = checkSorting(list);
if (sortOrder != SortOrder.unsorted) {
return -1;
}
if (useRandomPivot) {
var random = _math.Random();
var randomIndex = random.nextInt(rightIndex - leftIndex);
_swapElements(randomIndex, rightIndex);
}
}
int pivotVal = list[rightIndex]; //select the last item as the pivot
var headIndex = leftIndex - 1;
for (var scanIndex = leftIndex; scanIndex < rightIndex; scanIndex++) {
if (list[scanIndex] <= pivotVal) {
headIndex++;
_swapElements(headIndex, scanIndex);
}
}
var partitionIndex = headIndex + 1;
_swapElements(partitionIndex, rightIndex);
return partitionIndex;
}
void _swapElements(position1, position2) {
int tempVal = list[position1];
list[position1] = list[position2];
list[position2] = tempVal;
}
SortOrder checkSorting(List arr) {
var isAsc = true;
var isDesc = true;
for (var i = 0; i < arr.length - 1; i++) {
if (arr[i] < arr[i + 1]) isDesc = false;
if (arr[i] > arr[i + 1]) isAsc = false;
if (!isDesc && !isAsc) break; // not sorted Asc or Desc
}
if (isAsc) {
return SortOrder.ascending;
} else if (isDesc) return SortOrder.descending;
return SortOrder.unsorted;
}
}
// ***********************TESTS*********************************
num log2(num n) => _math.log(n) / _math.ln2;
List getRandomIntList({int min = 0, int max = 10000, int len}) {
var random = _math.Random();
var tempList = List(len);
for (var i = 0; i < len; i++) {
tempList[i] = random.nextInt(max - min);
}
return tempList;
}
class Tester {
static num _runControlTest(List list) {
var stopwatch = Stopwatch();
stopwatch.start();
list.sort();
stopwatch.stop();
return stopwatch.elapsedMicroseconds;
}
static void runTests(List list, String groupName,
{bool useRandomPivot = false, bool sortFromConstructor = false}) {
// CONTROL
var controlElapsedTimeMicroSec = 0;
controlElapsedTimeMicroSec = _runControlTest(list);
// END CONTROL
var expectedRecursionCountLogN = log2(list.length).ceil();
var expectedRecursionCountNLogN = list.length * expectedRecursionCountLogN;
var qs = QuickSort();
var stopwatch = Stopwatch();
stopwatch.start();
list =
qs.sort(iList: list, useRandomPivot: useRandomPivot); // METHOD TO TEST
stopwatch.stop();
var elapsedTimeMicroSec = stopwatch.elapsedMicroseconds;
var recursionCount = qs
.recurseCount; //NOTE (BUG in Dart?): unable to get the recurseCount correctly from within this class/method
group(groupName, () {
var testSubject =
'Time Taken: ${controlElapsedTimeMicroSec} microseconds';
var reason =
'The built in sort took ${controlElapsedTimeMicroSec} microseconds, while the test took ${elapsedTimeMicroSec}.';
test(testSubject, () {
expect(
elapsedTimeMicroSec, lessThanOrEqualTo(controlElapsedTimeMicroSec),
reason: reason);
});
testSubject =
'Time Complexity: ${recursionCount} vs ${expectedRecursionCountNLogN}';
reason =
'Time Complexity of ${recursionCount} is greater than either range (LogN) ${expectedRecursionCountLogN} or (N*LogN) ${expectedRecursionCountNLogN}';
test(testSubject, () {
expect(recursionCount, lessThanOrEqualTo(expectedRecursionCountNLogN),
reason: reason);
});
});
}
}
void main() {
var min = 0;
var len = 1000000;
var max = len;
var originalList = getRandomIntList(min: min, max: max, len: len);
var list = List.from(originalList);
// BUG? When called within this Tester.runTests the sort method does NOT return the correct recurseCount
Tester.runTests(list, 'UNSORTED_RIGHT_PIVOT', useRandomPivot: false);
list = List.from(originalList);
var qs = QuickSort();
// When called directly from main() the sort method DOES return the correct recurseCount
var stopwatch = Stopwatch()..start();
qs.sort(iList: list, useRandomPivot: true); //METHOD TO TEST
stopwatch.stop();
group('UNSORTED_RANDOM_PIVOT', () {
test('Time Taken: ${stopwatch.elapsedMicroseconds} microseconds', () {
expect(stopwatch.elapsedMicroseconds,
lessThanOrEqualTo(Tester._runControlTest(originalList)));
});
var nLogN = (list.length * (log2(list.length).ceil()));
test('Time Complexity: ${qs.recurseCount} vs $nLogN', () {
expect(qs.recurseCount, lessThanOrEqualTo(nLogN));
});
});
}

How to save a list of data in memory?

I have an AJAX action which is called by jQuery DataTables and has two responsibilities:
To get data from the database.
To handle the search, sort, and pagination work.
Now all I need is to fetch the data once and keep it in memory, so that when the user types something in the search box the search runs against the stored data directly.
Here's the code.
public ActionResult AjaxOil(JQueryDataTableParamModel param)
{
//To get the data; this should run only once.
IEnumerable<Oil> allOils = _context.Oils.ToList();
//All the other functional work (search, sort, paging).
IEnumerable<Oil> filteredOils;
if (!string.IsNullOrEmpty(param.sSearch))
{
filteredOils = allOils
.Where(c => c.CommonName.Contains(param.sSearch)
||
c.BotanicalName.Contains(param.sSearch)
||
c.PlantParts.Contains(param.sSearch)
||
c.Distillation.Contains(param.sSearch));
}
else
{
filteredOils = allOils;
}
var sortColumnIndex = Convert.ToInt32(Request["iSortCol_0"]);
Func<Oil, string> orderingFunction = (c => sortColumnIndex == 1 ? c.CommonName :
sortColumnIndex == 2 ? c.BotanicalName :
c.PlantParts);
var distillationFilter = Convert.ToString(Request["sSearch_4"]);
var commonFilter = Convert.ToString(Request["sSearch_1"]);
var botanicalFilter = Convert.ToString(Request["sSearch_2"]);
var plantFilter = Convert.ToString(Request["sSearch_3"]);
if (!string.IsNullOrEmpty(commonFilter))
{
filteredOils = filteredOils.Where(c => c.CommonName.Contains(commonFilter));
}
if (!string.IsNullOrEmpty(botanicalFilter))
{
filteredOils = filteredOils.Where(c => c.BotanicalName.Contains(botanicalFilter));
}
if (!string.IsNullOrEmpty(plantFilter))
{
filteredOils = filteredOils.Where(c => c.PlantParts.Contains(plantFilter));
}
if (!string.IsNullOrEmpty(distillationFilter))
{
filteredOils = filteredOils.Where(c => c.Distillation.Contains(distillationFilter));
}
var sortDirection = Request["sSortDir_0"];
if (sortDirection == "asc")
filteredOils = filteredOils.OrderBy(orderingFunction);
else
filteredOils = filteredOils.OrderByDescending(orderingFunction);
var displayedOils = filteredOils
.Skip(param.iDisplayStart)
.Take(param.iDisplayLength);
var result = from c in displayedOils
select new[] { Convert.ToString(c.OilId), c.CommonName, c.BotanicalName, c.PlantParts, c.Distillation };
return Json(new
{
sEcho = param.sEcho,
iTotalRecords = allOils.Count(),
iTotalDisplayRecords = filteredOils.Count(),
aaData = result
},
JsonRequestBehavior.AllowGet);
}
On first load, save the data in a cache, session, or static field. On the next search, check whether that cache/session/static field is not null and read from there instead of the database; otherwise load it from the database again.
Example:
private static ObjectCache _cache = new MemoryCache("MemoryCache");

public List<Oil> GetDataFromCache(string keyName)
{
    // private static ObjectCache _cache = new MemoryCache("keyName");
    var data = _cache.Get(keyName) as List<Oil>;
    if (data != null) return data;

    data = _context.Oils.ToList();
    // keep the cache entry for 2 hours
    _cache.Add(keyName, data, DateTimeOffset.Now.AddHours(2));
    return data;
}
(I didn't test the code, but that's the logic.) Or you can use Session if you prefer.
Session example:
if (Session["Data_Oils"] != null)
{
    return Session["Data_Oils"] as List<Oil>;
}
else
{
    var temp = _context.Oils.ToList();
    Session["Data_Oils"] = temp;
    return temp;
}
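To tie this into the action above, here is a minimal sketch assuming the MemoryCache helper from the example; the cache key name "Data_Oils" is just an illustration. Only the first request hits the database, and every later request (search, sort, paging) works against the in-memory list:

public ActionResult AjaxOil(JQueryDataTableParamModel param)
{
    // Loaded from the cache after the first request; later requests never touch the database.
    IEnumerable<Oil> allOils = GetDataFromCache("Data_Oils");

    // ...the existing filtering/sorting/paging code stays exactly as it is...
}

Keep in mind that the cached list goes stale: the 2-hour expiry above (or evicting the entry whenever an Oil row is added or edited) controls how quickly changes become visible to users.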

Regex TypeError: Cannot read property '1' of null

My datepicker regular expression is trying to match against a null array. How do I fix it? I'm not sure what clazz should equal if the array is null. I'm thinking of a simple if (matches[1]) { ... }, but I'm not sure what to do if matches itself is null. clazz is used twice elsewhere in the code. Do I just set clazz to null or zero?
var matches = exp.match(IS_REGEXP);
var clazz = scope.$eval(matches[1]);
Edit: Here's where they use clazz
if (data.lastActivated !== newActivated) {
if (data.lastActivated) {
$animate.removeClass(data.lastActivated.element, clazz);
}
if (newActivated) {
$animate.addClass(newActivated.element, clazz);
}
data.lastActivated = newActivated;
}
Here's IS_REGEXP
//                    11111111           22222222
var IS_REGEXP = /^\s*([\s\S]+?)\s+for\s+([\s\S]+?)\s*$/;
Double Edit:
Here's the whole function
function addForExp(exp, scope) {
var matches = exp.match(IS_REGEXP);
var clazz = scope.$eval(matches[1]);
var compareWithExp = matches[2];
var data = expToData[exp];
if (!data) {
var watchFn = function(compareWithVal) {
var newActivated = null;
instances.some(function(instance) {
var thisVal = instance.scope.$eval(onExp);
if (thisVal === compareWithVal) {
newActivated = instance;
return true;
}
});
if (data.lastActivated !== newActivated) {
if (data.lastActivated) {
$animate.removeClass(data.lastActivated.element, clazz);
}
if (newActivated) {
$animate.addClass(newActivated.element, clazz);
}
data.lastActivated = newActivated;
}
};
expToData[exp] = data = {
lastActivated: null,
scope: scope,
watchFn: watchFn,
compareWithExp: compareWithExp,
watcher: scope.$watch(compareWithExp, watchFn)
};
}
data.watchFn(scope.$eval(compareWithExp));
}
Setting clazz to null or an empty string will do, if clazz is your only concern:
var clazz = matches ? scope.$eval(matches[1]) : '';
But since matches[2] is also needed for compareWithExp, it might be better to bail out of the whole function when there is no match:
if ( ! matches ) return;

Need a map of a map in Dart.lang

I'm learning Dart and have a PHP background. Given the code at this link...
Link to full working php algorithm
Or, to copy the main part here, this is what I have in PHP and it works fine. I need to know how to do the same in Dart:
foreach ($floors as $x) {
$temp = explode("," , $x);
$floor[$temp[0]][$temp[1]] = $temp[2];
}
library map;
void main() {
var temp = """
1,201,70
1,301,71
3,301,31
1,401,79
2,501,2
1,601,171
2,801,61
2,901,100
5,101,54
4,203,23
3,201,112""";
var values = temp.split('\n');
var floors = new Map<int, Map<int, double>>();
values.forEach((f) {
var v = f.trim().split(',');
var fk = int.parse(v[0]);
var rk = int.parse(v[1]);
var rv = double.parse(v[2]);
if(floors[fk] == null) {
floors[fk] = new Map<int,double>();
}
floors[fk][rk]=rv;
});
floors.forEach((fk, fv) {
var tempSum = 0.0; // start from a double so += with the double temps type-checks
var lowest = null;
for(var rk in fv.keys) {
var temp = fv[rk];
tempSum += temp;
if(lowest == null || (lowest > temp)) {
lowest = temp;
}
}
print("Floor #${fk} has ${fv.length} rooms. The average temp is ${tempSum / fv.length}. The lowest temp is ${lowest}.");
});
}

Moving a work item between projects in TFS

Is it possible to move a work item from one project to another inside TFS? I've seen a copy option, but no move. Also, if it is possible, what are the implications for the work item's history?
I found this article from 2008 that seems to say it's not possible, but I wondered whether there has been any progress since then.
It isn't possible to move, only copy. The way we do it is: make the copy, link it to the original, then close the original as obsolete. You could also create the copy and use tf destroy on the original, but then you lose all history.
If you wanted to, you could get very fancy and create your own "move" utility that copies the work item and all of its history, then closes out (or destroys) the old one. That seems like overkill for something you probably shouldn't need to do very often.
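For what it's worth, here is a minimal sketch of that copy-and-close approach using the TFS client object model. WorkItem.Copy() is a real API and it should also link the copy back to the original; the class name, the "Closed" state, and the history text below are my own assumptions and will vary with your process template.

using Microsoft.TeamFoundation.WorkItemTracking.Client;

class WorkItemMover
{
    // Sketch only: copy a work item into another project's work item type,
    // then close the original so it drops out of the source project's queries.
    public static void Move(WorkItemStore store, int sourceId, string targetProject, string targetTypeName)
    {
        var source = store.GetWorkItem(sourceId);
        var targetType = store.Projects[targetProject].WorkItemTypes[targetTypeName];

        // Copy() carries the field values across and links the new item to the original.
        var copy = source.Copy(targetType);
        copy.History = "Copied from work item " + sourceId; // the revision history itself does not move
        copy.Save();

        // State/reason names ("Closed", "Obsolete") depend on your process template.
        source.State = "Closed";
        source.History = "Moved (copied) to work item " + copy.Id + " in " + targetProject;
        source.Save();
    }
}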
Lars Wilhelmsen wrote a WorkItemMigrator -> http://larsw.codeplex.com/SourceControl/list/changesets
It's a good starting point for a utility you can customize to your needs. We used it to split off 100 or so work items to a new project. Here's the program I ended up with; modify the query to select the subset of items to migrate.
namespace WorkItemMigrator
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using Microsoft.TeamFoundation.Client;
using Microsoft.TeamFoundation.Framework.Common;
using Microsoft.TeamFoundation.Server;
using Microsoft.TeamFoundation.WorkItemTracking.Client;
class Program
{
#region Members
private static readonly Dictionary<Uri, TfsTeamProjectCollection> Collections = new Dictionary<Uri, TfsTeamProjectCollection>();
private static readonly Uri SourceCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private static readonly Uri TargetCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private const String Areas = "ProjectModelHierarchy";
private const string Iterations = "ProjectLifecycle";
private const string TargetProjectName = "TargetProject";
private const string MicrosoftVstsCommonStackRankFieldName = "Microsoft.VSTS.Common.StackRank";
private const string MicrosoftVstsCommonPriority = "Microsoft.VSTS.Common.Priority";
private const string TargetWorkItemType = "User Story";
private const string Wiql = "SELECT [System.Id], [System.State], [System.Title], [System.AssignedTo], [System.WorkItemType], [Microsoft.VSTS.Common.Priority], " +
"[System.IterationPath], [System.AreaPath], [System.History], [System.Description] " +
"FROM WorkItems WHERE [System.TeamProject] = 'SourceProject' AND " +
"[System.State] = 'Active' " +
"ORDER BY [System.Id]";
private static WorkItemTypeCollection WorkItemTypes;
private static Dictionary<int, int> WorkItemIdMap = new Dictionary<int, int>();
#endregion
static void Main()
{
var createAreasAndIterations = GetRunMode();
var sourceWorkItemStore = GetSourceWorkItemStore();
var sourceWorkItems = sourceWorkItemStore.Query(Wiql);
var targetWorkItemStore = GetTargetWorkItemStore();
var targetProject = targetWorkItemStore.Projects[TargetProjectName];
WorkItemTypes = targetProject.WorkItemTypes;
foreach (WorkItem sourceWorkItem in sourceWorkItems)
{
if (createAreasAndIterations)
{
Console.WriteLine();
EnsureThatStructureExists(TargetProjectName, Areas, sourceWorkItem.AreaPath.Substring(sourceWorkItem.AreaPath.IndexOf("\\") + 1));
EnsureThatStructureExists(TargetProjectName, Iterations, sourceWorkItem.IterationPath.Substring(sourceWorkItem.IterationPath.IndexOf("\\") + 1));
}
else
{
MigrateWorkItem(sourceWorkItem);
}
}
if (!createAreasAndIterations)
{
var query = from WorkItem wi in sourceWorkItems where wi.Links.Count > 0 select wi;
foreach (WorkItem sourceWorkItem in query)
{
LinkRelatedItems(targetWorkItemStore, sourceWorkItem);
}
}
TextWriter tw = File.CreateText(@"C:\temp\TFS_MigratedItems.csv");
tw.WriteLine("SourceId,TargetId");
foreach (var entry in WorkItemIdMap)
{
tw.WriteLine(entry.Key + "," + entry.Value);
}
tw.Close();
Console.WriteLine();
Console.WriteLine("Done! Have a nice day.");
Console.ReadLine();
}
private static bool GetRunMode()
{
bool createAreasAndIterations;
while (true)
{
Console.Write("Create [A]reas/Iterations or [M]igrate (Ctrl-C to quit)?: ");
var command = Console.ReadLine().ToUpper().Trim();
if (command == "A")
{
createAreasAndIterations = true;
break;
}
if (command == "M")
{
createAreasAndIterations = false;
break;
}
Console.WriteLine("Unknown command " + command + " - try again.");
}
return createAreasAndIterations;
}
private static void MigrateWorkItem(WorkItem sourceWorkItem)
{
var targetWIT = WorkItemTypes[sourceWorkItem.Type.Name];
var newWorkItem = targetWIT.NewWorkItem();
//var newWorkItem = targetWorkItemType.NewWorkItem();
// Description (Task) / Steps to reproduce (Bug)
if (sourceWorkItem.Type.Name != "Bug")
{
newWorkItem.Description = sourceWorkItem.Description;
}
else
{
newWorkItem.Fields["Microsoft.VSTS.TCM.ReproSteps"].Value = sourceWorkItem.Description;
}
// History
newWorkItem.History = sourceWorkItem.History;
// Title
newWorkItem.Title = sourceWorkItem.Title;
// Assigned To
newWorkItem.Fields[CoreField.AssignedTo].Value = sourceWorkItem.Fields[CoreField.AssignedTo].Value;
// Stack Rank - Priority
newWorkItem.Fields[MicrosoftVstsCommonPriority].Value = sourceWorkItem.Fields[MicrosoftVstsCommonPriority].Value;
// Area Path
newWorkItem.AreaPath = FormatPath(TargetProjectName, sourceWorkItem.AreaPath);
// Iteration Path
newWorkItem.IterationPath = FormatPath(TargetProjectName, sourceWorkItem.IterationPath);
// Activity
if (sourceWorkItem.Type.Name == "Task")
{
newWorkItem.Fields["Microsoft.VSTS.Common.Activity"].Value = sourceWorkItem.Fields["Microsoft.VSTS.Common.Discipline"].Value;
}
// State
//newWorkItem.State = sourceWorkItem.State;
// Reason
//newWorkItem.Reason = sourceWorkItem.Reason;
// build a usable rendition of prior revision history
RevisionCollection revisions = sourceWorkItem.Revisions;
var query = from Revision r in revisions orderby r.Fields["Changed Date"].Value descending select r;
StringBuilder sb = new StringBuilder(String.Format("Migrated from work item {0}<BR />\n", sourceWorkItem.Id));
foreach (Revision revision in query)
{
String history = (String)revision.Fields["History"].Value;
if (!String.IsNullOrEmpty(history))
{
foreach (Field f in revision.Fields)
{
if (!Object.Equals(f.Value, f.OriginalValue))
{
if (f.Name == "History")
{
string notation = string.Empty;
if (revision.Fields["State"].OriginalValue != revision.Fields["State"].Value)
{
notation = String.Format("({0} to {1})", revision.Fields["State"].OriginalValue, revision.Fields["State"].Value);
}
//Console.WriteLine("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation);
sb.Append(String.Format("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}<BR />\n", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation));
}
}
}
//Console.WriteLine("Revision {0}: ", revision.Fields["Rev"].Value);
//Console.WriteLine(" ChangedDate: " + revision.Fields["ChangedDate"].Value);
//Console.WriteLine(" History: " + sb.ToString());
}
}
newWorkItem.History = sb.ToString();
// Attachments
for (var i = 0; i < sourceWorkItem.AttachedFileCount; i++)
{
CopyAttachment(sourceWorkItem.Attachments[i], newWorkItem);
}
// Validate before save
if (!newWorkItem.IsValid())
{
var reasons = newWorkItem.Validate();
Console.WriteLine(string.Format("Could not validate new work item (old id: {0}).", sourceWorkItem.Id));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
Console.Write("[" + sourceWorkItem.Id + "] " + newWorkItem.Title);
try
{
newWorkItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [saved: {0}]", newWorkItem.Id));
WorkItemIdMap.Add(sourceWorkItem.Id, newWorkItem.Id);
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
private static void CopyAttachment(Attachment attachment, WorkItem newWorkItem)
{
using (var client = new WebClient())
{
client.UseDefaultCredentials = true;
client.DownloadFile(attachment.Uri, attachment.Name);
var newAttachment = new Attachment(attachment.Name, attachment.Comment);
newWorkItem.Attachments.Add(newAttachment);
}
}
private static void LinkRelatedItems(WorkItemStore targetWorkItemStore, WorkItem sourceWorkItem)
{
int newId = WorkItemIdMap[sourceWorkItem.Id];
WorkItem targetItem = targetWorkItemStore.GetWorkItem(newId);
foreach (Link l in sourceWorkItem.Links)
{
if (l is RelatedLink)
{
RelatedLink sl = l as RelatedLink;
switch (sl.ArtifactLinkType.Name)
{
case "Related Workitem":
{
if (WorkItemIdMap.ContainsKey(sl.RelatedWorkItemId))
{
int RelatedWorkItemId = WorkItemIdMap[sl.RelatedWorkItemId];
RelatedLink rl = new RelatedLink(sl.LinkTypeEnd, RelatedWorkItemId);
// !!!!
// this does not work - need to check the existing links to see if one exists already for the linked workitem.
// using contains expects the same object and that's not what I'm doing here!!!!!!
//if (!targetItem.Links.Contains(rl))
// !!!!
var query = from RelatedLink qrl in targetItem.Links where qrl.RelatedWorkItemId == RelatedWorkItemId select qrl;
if (query.Count() == 0)
{
targetItem.Links.Add(rl);
// Validate before save
if (!targetItem.IsValid())
{
var reasons = targetItem.Validate();
Console.WriteLine(string.Format("Could not validate work item (old id: {0}) related link id {1}.", sourceWorkItem.Id, sl.RelatedWorkItemId));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
try
{
targetItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [Updated: {0}]", targetItem.Id));
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
}
break;
}
default:
{ break; }
}
}
}
}
private static void EnsureThatStructureExists(string projectName, string structureType, string structurePath)
{
var parts = structurePath.Split('\\');
var css = GetCommonStructureService();
var projectInfo = css.GetProjectFromName(projectName);
var parentNodeUri = GetCssStructure(GetCommonStructureService(), projectInfo.Uri, structureType).Uri;
var currentPath = FormatPath(projectName, structureType == Areas ? "Area" : "Iteration");
foreach (var part in parts)
{
currentPath = FormatPath(currentPath, part);
Console.Write(currentPath);
try
{
var currentNode = css.GetNodeFromPath(currentPath);
parentNodeUri = currentNode.Uri;
Console.ForegroundColor = ConsoleColor.DarkGreen;
Console.WriteLine(" [found]");
}
catch
{
parentNodeUri = css.CreateNode(part, parentNodeUri);
Console.ForegroundColor = ConsoleColor.Green;
Console.WriteLine(" [created]");
}
Console.ResetColor();
}
}
private static string FormatPath(string currentPath, string part)
{
part = part.Substring(part.IndexOf("\\") + 1);
currentPath = string.Format(@"{0}\{1}", currentPath, part);
return currentPath;
}
private static TfsTeamProjectCollection GetProjectCollection(Uri uri)
{
TfsTeamProjectCollection collection;
if (!Collections.TryGetValue(uri, out collection))
{
collection = TfsTeamProjectCollectionFactory.GetTeamProjectCollection(uri);
collection.Connect(ConnectOptions.IncludeServices);
collection.Authenticate();
Collections.Add(uri, collection);
}
return Collections[uri];
}
private static WorkItemStore GetSourceWorkItemStore()
{
var collection = GetProjectCollection(SourceCollectionUri);
return collection.GetService<WorkItemStore>();
}
private static WorkItemStore GetTargetWorkItemStore()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<WorkItemStore>();
}
public static NodeInfo GetCssStructure(ICommonStructureService css, String projectUri, String structureType)
{
return css.ListStructures(projectUri).FirstOrDefault(node => node.StructureType == structureType);
}
private static ICommonStructureService GetCommonStructureService()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<ICommonStructureService>();
}
}
}
