How to append a key to an image's name in ASP.NET MVC

Here is my code to append a postfix to an image's name:
private void SaveUpload(SledBuck sledBuck)
{
    if (Request.Files.Count > 0)
    {
        var postFix = "_" + sledBuck.Id;
        for (var index = 0; index < Request.Files.Count; index++)
        {
            if (Request.Files[index].ContentLength > 0)
            {
                var hpf = Request.Files[index];
                var postedFileName = string.Format("{0}-{1}", Path.GetFileName(hpf.FileName), postFix);
                string folderPath = ConfigurationManager.AppSettings["ImagePath"];
                var savedFileName = Path.Combine(Server.MapPath(folderPath), postedFileName);
                hpf.SaveAs(savedFileName); // Save the file
            }
        }
    }
}
Let's say the image's name is image.jpg. I want to append the primary key to its name before saving it, like image_1.jpg. With the code above, postedFileName comes out as image.jpg_1, which is not what I want. How can I append the Id properly?

Basically, what you get from hpf.FileName is the file name plus the extension. You need to split off the extension and rebuild the name, so proceed like this:
var hpf = Request.Files[index];
var FileExtension = Path.GetExtension(hpf.FileName);
// Path.GetFileNameWithoutExtension is safer than string.Replace, which would
// also strip any earlier occurrence of the extension text inside the name
var FileNameWithoutExtension = Path.GetFileNameWithoutExtension(hpf.FileName);
var FileUniqueName = String.Format("{0}_{1}{2}", FileNameWithoutExtension, GenerateDateTimeStamp(), FileExtension);
string folderPath = ConfigurationManager.AppSettings["ImagePath"];
var savedFileName = Path.Combine(Server.MapPath(folderPath), FileUniqueName);
hpf.SaveAs(savedFileName); // Save the file
where GenerateDateTimeStamp() is
public string GenerateDateTimeStamp()
{
    // day-month-year hour-minute-second ("MM" is months, "mm" is minutes, "HH" is 24-hour)
    return DateTime.Now.ToString("ddMMyyyyHHmmss");
}
You can include your primary key here as well. This should give you some idea.
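For the question as asked, here is a minimal sketch of the loop body (assuming the same SledBuck model and ImagePath setting as in the question) that inserts the Id before the extension, so image.jpg becomes image_1.jpg:
var hpf = Request.Files[index];
// image.jpg + Id 1 -> image_1.jpg
var postedFileName = string.Format("{0}_{1}{2}",
    Path.GetFileNameWithoutExtension(hpf.FileName),
    sledBuck.Id,
    Path.GetExtension(hpf.FileName));
string folderPath = ConfigurationManager.AppSettings["ImagePath"];
hpf.SaveAs(Path.Combine(Server.MapPath(folderPath), postedFileName));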

Related

Xamarin.iOS: Detail command is null because it runs before the function

I'm building an iOS app with Xamarin.iOS and MvvmCross, and I have a function that puts a random id in a text file every day, so that I get a recipe of the day.
The problem is that the code for the Detail command (for the button) runs before the function that stores everything in the text file. So the Detail command returns null and nothing happens when I push the button. The second time I run the app it does what it should, because there's already an id stored in the text file.
The view:
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    var set = new MvxFluentBindingDescriptionSet<TabHomeView, TabHomeViewModel>(this);
    set.Bind(MorningImage).For(img => img.Image).To(vm => vm.MorningContent.picture).WithConversion<StringToImageConverter>();
    set.Bind(MorningJuiceName).To(vm => vm.MorningContent.name);
    set.Bind(MorningBtn).To(vm => vm.NavigateToMorningJuice);
    set.Apply();
}
The function to put a random id in the text file:
public async void GetAfternoonJuice()
{
    Recipes = await _recipeService.GetRecipes();
    int counter = Recipes.Count;
    Random rnd = new Random();
    int RandomNumber = rnd.Next(1, counter);
    string rndNumToStr = RandomNumber.ToString();
    DateTime dateAndTime = DateTime.Now;
    string day = dateAndTime.ToString("dd/MM/yyyy");
    string folderValue = (day + "," + rndNumToStr);
    var _folderName = "TextFilesFolder2";
    var _fileName = "AfternoonJuice";
    if (!_fileStore.FolderExists(_folderName))
        _fileStore.EnsureFolderExists(_folderName);
    // Read the content of the file
    string value = string.Empty;
    _fileStore.TryReadTextFile(_folderName + "/" + _fileName, out value);
    string CheckFileContent = value;
    string[] TextFileList;
    // If the file is empty, insert default data
    if (CheckFileContent == null)
    {
        _fileStore.WriteFile(_folderName + "/" + _fileName, "00/00/00,0");
        string d = "00/00/00,0";
        TextFileList = d.Split(',');
    }
    else
    {
        TextFileList = CheckFileContent.Split(',');
    }
    if (TextFileList[0] != day)
    {
        // Delete the file to remove stale data
        _fileStore.DeleteFile(_folderName + "/" + _fileName);
        // Recreate the file
        if (!_fileStore.FolderExists(_folderName))
            _fileStore.EnsureFolderExists(_folderName);
        _fileStore.WriteFile(_folderName + "/" + _fileName, folderValue);
        string NewValue = string.Empty;
        _fileStore.TryReadTextFile(_folderName + "/" + _fileName, out NewValue);
        string NValue = NewValue;
        List<string> NewTextFileList = new List<string>(
            NValue.Split(new string[] { "," }, StringSplitOptions.None));
        int numVall = Int32.Parse(NewTextFileList[1]);
        int NewRandomValue = numVall;
        AfternoonContent = await _recipeService.GetRecipeById(NewRandomValue);
        RaisePropertyChanged(() => AfternoonContent);
    }
    else
    {
        int numVall = Int32.Parse(TextFileList[1]);
        int NewRandomValue = numVall;
        AfternoonContent = await _recipeService.GetRecipeById(NewRandomValue);
        RaisePropertyChanged(() => AfternoonContent);
    }
}
The detail command:
public MvxCommand<Recipe> NavigateToAfternoonJuice
{
    get
    {
        var _folderName = "TextFilesFolder2";
        var _fileName = "AfternoonJuice";
        string value = string.Empty;
        _fileStore.TryReadTextFile(_folderName + "/" + _fileName, out value);
        string fV = value;
        List<string> TextFileList = new List<string>(
            fV.Split(new string[] { "," }, StringSplitOptions.None));
        int numVall = Int32.Parse(TextFileList[1]);
        int NewRandomValue = numVall;
        return new MvxCommand<Recipe>(SelectedRecipe =>
        {
            ShowViewModel<DetailJuiceListViewModel>(new { RecipeId = NewRandomValue });
        });
    }
}
Some of the code in your public NavigateToAfternoonJuice property runs before your command is executed: it runs when the binding occurs, not when the command body actually executes.
You probably want to modify your command to something like the following instead.
private MvxCommand<Recipe> _navigateToAfternoonJuice;
public MvxCommand<Recipe> NavigateToAfternoonJuice
{
    get
    {
        if (_navigateToAfternoonJuice == null)
            _navigateToAfternoonJuice = new MvxCommand<Recipe>(DoNavigateToAfternoonJuice);
        return _navigateToAfternoonJuice;
    }
}
private void DoNavigateToAfternoonJuice(Recipe selectedRecipe)
{
    var _folderName = "TextFilesFolder2";
    var _fileName = "AfternoonJuice";
    string value = string.Empty;
    _fileStore.TryReadTextFile(_folderName + "/" + _fileName, out value);
    string fV = value;
    List<string> TextFileList = new List<string>(
        fV.Split(new string[] { "," }, StringSplitOptions.None));
    int numVall = Int32.Parse(TextFileList[1]);
    int NewRandomValue = numVall;
    ShowViewModel<DetailJuiceListViewModel>(new { RecipeId = NewRandomValue });
}
This way the text file is read when the command executes, not when the binding is set up.
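As a side note, on C# 6 or later the same lazy initialization can be written as an expression-bodied property (a behavior-equivalent sketch of the getter above):
private MvxCommand<Recipe> _navigateToAfternoonJuice;
public MvxCommand<Recipe> NavigateToAfternoonJuice =>
    _navigateToAfternoonJuice ?? (_navigateToAfternoonJuice = new MvxCommand<Recipe>(DoNavigateToAfternoonJuice));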

WrongExpectedVersionException on EventStore Getting started project

I have re-written the getting-started-with-event-store project to learn what's going on, and now the test CanSaveExistingAggregate() fails with a WrongExpectedVersionException. To work out what's going on, I would like to know what the expected version should be; how can I find this out? In the test, the line repository.Save(firstSaved, Guid.NewGuid(), d => { }); calculates the expected version as 101, and this is where it fails:
[Test]
public void CanSaveExistingAggregate()
{
    var savedId = SaveTestAggregateWithoutCustomHeaders(repository, 100 /* excludes TestAggregateCreated */);
    var firstSaved = repository.GetById<TestAggregate>(savedId);
    Console.WriteLine("version:" + firstSaved.Id);
    firstSaved.ProduceEvents(50);
    repository.Save(firstSaved, Guid.NewGuid(), d => { });
    var secondSaved = repository.GetById<TestAggregate>(savedId);
    Assert.AreEqual(150, secondSaved.AppliedEventCount);
}
And the code where the exception is thrown:
public void Save(CommonDomain.IAggregate aggregate, Guid commitId, Action<IDictionary<string, object>> updateHeaders)
{
    var commitHeaders = new Dictionary<string, object>
    {
        {CommitIdHeader, commitId},
        {AggregateClrTypeHeader, aggregate.GetType().AssemblyQualifiedName}
    };
    updateHeaders(commitHeaders);
    var streamName = aggregateIdToStreamName(aggregate.GetType(), aggregate.Id);
    var newEvents = aggregate.GetUncommittedEvents().Cast<object>().ToList();
    var originalVersion = aggregate.Version - newEvents.Count;
    var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion;
    var eventsToSave = newEvents.Select(e => ToEventData(Guid.NewGuid(), e, commitHeaders)).ToList();
    if (eventsToSave.Count < WritePageSize)
    {
        eventStoreConnection.AppendToStream(streamName, expectedVersion, eventsToSave);
    }
    else
    {
        var transaction = eventStoreConnection.StartTransaction(streamName, expectedVersion);
        var position = 0;
        while (position < eventsToSave.Count)
        {
            var pageEvents = eventsToSave.Skip(position).Take(WritePageSize);
            transaction.Write(pageEvents);
            position += WritePageSize;
        }
        transaction.Commit();
    }
    aggregate.ClearUncommittedEvents();
}
All the other tests pass (except ThrowsOnGetDeletedAggregate(), but I'll ask about that later). I think this is the only test where expectedVersion ends up as something other than ExpectedVersion.NoStream.
Well it turns out it was just a mistake when writing the code, rather than
var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion;
it should be
var expectedVersion = originalVersion == 0 ? ExpectedVersion.NoStream : originalVersion - 1;
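The off-by-one follows from Event Store's zero-based stream numbering: after N events the last event in the stream is number N - 1, and that last event number is what AppendToStream expects. For the failing test the arithmetic works out like this (a sketch, assuming aggregate.Version counts every applied event including TestAggregateCreated):
// existing events : 1 (TestAggregateCreated) + 100        = 101
// new events      : 50
// aggregate.Version = 151, so originalVersion = 151 - 50  = 101
// last event already in the stream (zero-based) = 101 - 1 = 100
// => expectedVersion must be originalVersion - 1 = 100, not 101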

Need a map of a map in Dart.lang

Learning Dart; I have a PHP background. Given this code at this link...
Link to full working php algorithm
or, to copy the main part here, this is what I have in PHP, which works fine. I need to know how to do it in Dart:
foreach ($floors as $x) {
    $temp = explode(",", $x);
    $floor[$temp[0]][$temp[1]] = $temp[2];
}
library map;

void main() {
  var temp = """
1,201,70
1,301,71
3,301,31
1,401,79
2,501,2
1,601,171
2,801,61
2,901,100
5,101,54
4,203,23
3,201,112""";
  var values = temp.split('\n');
  var floors = new Map<int, Map<int, double>>();
  values.forEach((f) {
    var v = f.trim().split(',');
    var fk = int.parse(v[0]);
    var rk = int.parse(v[1]);
    var rv = double.parse(v[2]);
    if (floors[fk] == null) {
      floors[fk] = new Map<int, double>();
    }
    floors[fk][rk] = rv;
  });
  floors.forEach((fk, fv) {
    var tempSum = 0.0; // start from a double so summing room temps stays well-typed
    var lowest = null;
    for (var rk in fv.keys) {
      var temp = fv[rk];
      tempSum += temp;
      if (lowest == null || (lowest > temp)) {
        lowest = temp;
      }
    }
    print("Floor #${fk} has ${fv.length} rooms. The average temp is ${tempSum / fv.length}. The lowest temp is ${lowest}.");
  });
}
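As an aside, the null-check-then-assign dance can be collapsed with Map's putIfAbsent: floors.putIfAbsent(fk, () => new Map<int, double>())[rk] = rv;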

Silverlight 3 File Dialog Box

OK, I have a WCF service which reads an Excel file from a certain location and strips the data into an object. What I need is the ability for users of my program to upload an Excel sheet to the file location that my service uses.
Alternatively, I could pass the uploaded Excel sheet to the service directly.
Can anyone help with this? My service code is:
public List<ImportFile> ImportExcelData(string FileName)
{
    //string dataSource = Location + FileName;
    string dataSource = Location;
    string conStr = "Provider=Microsoft.Jet.OLEDB.4.0;" + "Data Source=" + dataSource.ToString() + ";Extended Properties=Excel 8.0;";
    var con = new OleDbConnection(conStr);
    con.Open();
    var data = con.GetOleDbSchemaTable(OleDbSchemaGuid.Tables, null);
    var sheetName = data.Rows[0]["TABLE_NAME"].ToString();
    OleDbCommand cmd = new OleDbCommand("SELECT * FROM [" + sheetName + "] WHERE Status = '4'", con);
    OleDbDataAdapter oleda = new OleDbDataAdapter();
    oleda.SelectCommand = cmd;
    DataSet ds = new DataSet();
    oleda.Fill(ds, "Employees");
    DataTable dt = ds.Tables[0];
    var _impFiles = new List<ImportFile>();
    foreach (DataRow row in dt.Rows)
    {
        var _import = new ImportFile();
        _import.PurchaseOrder = row[4].ToString();
        try
        {
            var ord = row[8].ToString();
            DateTime dati = Convert.ToDateTime(ord);
            _import.ShipDate = dati;
        }
        catch (Exception)
        {
            _import.ShipDate = null;
        }
        ImportFile additionalData = new ImportFile();
        additionalData = GetAdditionalData(_import.PurchaseOrder);
        _import.NavOrderNo = additionalData.NavOrderNo;
        _import.IsInstall = additionalData.IsInstall;
        _import.SalesOrderId = additionalData.SalesOrderId;
        _import.ActivityID = additionalData.ActivityID;
        _import.Subject = additionalData.Subject;
        _import.IsMatched = (_import.ShipDate != null & _import.NavOrderNo != "" & _import.NavOrderNo != null & _import.ShipDate > DateTime.Parse("01/01/1999") ? true : false);
        _import.UpdatedShipToField = false;
        _import.UpdatedShipToFieldFailed = false;
        _import.CreateNote = false;
        _import.CreateNoteFailed = false;
        _import.CompleteTask = false;
        _import.CompleteTaskFailed = false;
        _import.FullyCompleted = 0;
        _import.NotCompleted = false;
        _impFiles.Add(_import);
    }
    oleda.Dispose();
    con.Close();
    //File.Delete(dataSource);
    return _impFiles;
}
You will want to modify your service to accept a Stream instead of a filename; then you can save it off to a file (or parse it directly from the Stream, although I don't know how to do that).
Then in your Silverlight app you could do something like this:
private void Button_Click(object sender, RoutedEventArgs ev)
{
    var dialog = new OpenFileDialog();
    dialog.Filter = "Excel Files (*.xls;*.xlsx;*.xlsm)|*.xls;*.xlsx;*.xlsm|All Files (*.*)|*.*";
    if (dialog.ShowDialog() == true)
    {
        var fileStream = dialog.File.OpenRead();
        var proxy = new WcfService();
        proxy.ImportExcelDataCompleted += (s, e) =>
        {
            MessageBox.Show("Import Data is at e.Result");
            // don't forget to close the stream
            fileStream.Close();
        };
        proxy.ImportExcelDataAsync(fileStream);
    }
}
You could also have your WCF service accept a byte[] and do something like this.
private void Button_Click(object sender, RoutedEventArgs ev)
{
    var dialog = new OpenFileDialog();
    dialog.Filter = "Excel Files (*.xls;*.xlsx;*.xlsm)|*.xls;*.xlsx;*.xlsm|All Files (*.*)|*.*";
    if (dialog.ShowDialog() == true)
    {
        var length = dialog.File.Length;
        var fileContents = new byte[length];
        using (var fileStream = dialog.File.OpenRead())
        {
            if (length > Int32.MaxValue)
            {
                throw new Exception("Are you sure you want to load > 2GB into memory. There may be better options");
            }
            fileStream.Read(fileContents, 0, (int)length);
        }
        var proxy = new WcfService();
        proxy.ImportExcelDataCompleted += (s, e) =>
        {
            MessageBox.Show("Import Data is at e.Result");
            // no need to close any streams this way
        };
        proxy.ImportExcelDataAsync(fileContents);
    }
}
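For completeness, here is a sketch of the service-side counterpart for the byte[] variant. ParseExcelFile is a hypothetical helper wrapping the OleDb parsing code from the original method:
public List<ImportFile> ImportExcelData(byte[] uploadedFile)
{
    // Persist the upload so the Jet OLEDB provider can open it from disk.
    var tempFile = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
    try
    {
        File.WriteAllBytes(tempFile, uploadedFile);
        return ParseExcelFile(tempFile); // hypothetical helper containing the original OleDb parsing
    }
    finally
    {
        if (File.Exists(tempFile))
            File.Delete(tempFile);
    }
}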
Update
Your service could look like this:
public List<ImportFile> ImportExcelData(Stream uploadedFile)
{
    var tempFile = HttpContext.Current.Server.MapPath("~/uploadedFiles/" + Path.GetRandomFileName());
    try
    {
        using (var tempStream = File.OpenWrite(tempFile))
        {
            uploadedFile.CopyTo(tempStream);
        }
        //string dataSource = Location + FileName;
        string dataSource = tempFile;
        string conStr = "Provider=Microsoft.Jet.OLEDB.4.0;" + "Data Source=" + dataSource.ToString() +
            ";Extended Properties=Excel 8.0;";
        var con = new OleDbConnection(conStr);
        con.Open();
        // ... continue with the same OleDb parsing as the original method and return the list ...
    }
    finally
    {
        if (File.Exists(tempFile))
            File.Delete(tempFile);
    }
}
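One caveat worth noting: for a WCF operation to accept a Stream like this, the stream generally has to be the only body parameter, and the binding typically needs transferMode="StreamedRequest" (or "Streamed") plus a raised maxReceivedMessageSize, or large uploads will be buffered or rejected. A minimal sketch of the matching contract, with illustrative names:
[ServiceContract]
public interface IImportService
{
    // the Stream must be the lone body parameter for streamed transfer
    [OperationContract]
    List<ImportFile> ImportExcelData(Stream uploadedFile);
}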
Thanks Bendewey, that was great. I had to amend it slightly.
My service:
var tempFile = @"c:\temp\" + Path.GetRandomFileName();
try
{
    int length = 256;
    int bytesRead = 0;
    Byte[] buffer = new Byte[length];
    // write the uploaded bytes to the temp file
    using (FileStream fs = new FileStream(tempFile, FileMode.Create))
    {
        // loop until the stream is exhausted (a short read mid-stream no longer ends the loop early)
        while ((bytesRead = uploadedFile.Read(buffer, 0, length)) > 0)
        {
            fs.Write(buffer, 0, bytesRead);
        }
    }
    uploadedFile.Dispose();
    string conStr = "Provider=Microsoft.Jet.OLEDB.4.0;" + "Data Source=" + tempFile + ";Extended Properties=Excel 8.0;";
    var con = new OleDbConnection(conStr);
Thanks again for your help.

Moving a work item between projects in TFS

Is it possible to move a work item from one project to another inside TFS? I've seen a copy option, but no move. Also, if it is possible, what's the implication for any of the WI history?
I found this article from 2008 that seems to say it isn't, but I wondered if there'd been any progress since then.
It isn't possible to move, just copy. The way we do it is: make the copy, link the original, then close the original as obsolete. You could also create the copy and tf destroy the original, but then you will lose all history.
If you wanted to, you could get very fancy and create your own "move" utility that copies the work item and all of its history, then closes out (or destroys) the old one. That seems like overkill for something you probably shouldn't need to do all that often.
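For reference, the client object model can do the copy-and-link part for you: WorkItem.Copy() (and its Copy(WorkItemType) overload for a different project's type) creates a copy carrying a link back to the original. A rough sketch, with illustrative project, type, and state names:
// Assumes a connected TfsTeamProjectCollection named 'collection'
var store = collection.GetService<WorkItemStore>();
var source = store.GetWorkItem(1234); // the work item to "move"
var targetType = store.Projects["TargetProject"].WorkItemTypes["Task"];
var copy = source.Copy(targetType);   // the copy is linked back to the original
copy.Save();
source.State = "Closed";              // valid states depend on your process template
source.Save();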
Lars Wilhelmsen wrote a WorkItemMigrator -> http://larsw.codeplex.com/SourceControl/list/changesets
It's a good starting point for a utility you can customize for your needs. We used it to split off 100 or so work items to a new project. Here's the program I ended up with. Modify the query to subset the items to migrate.
namespace WorkItemMigrator
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using Microsoft.TeamFoundation.Client;
using Microsoft.TeamFoundation.Framework.Common;
using Microsoft.TeamFoundation.Server;
using Microsoft.TeamFoundation.WorkItemTracking.Client;
class Program
{
#region Members
private static readonly Dictionary<Uri, TfsTeamProjectCollection> Collections = new Dictionary<Uri, TfsTeamProjectCollection>();
private static readonly Uri SourceCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private static readonly Uri TargetCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private const String Areas = "ProjectModelHierarchy";
private const string Iterations = "ProjectLifecycle";
private const string TargetProjectName = "TargetProject";
private const string MicrosoftVstsCommonStackRankFieldName = "Microsoft.VSTS.Common.StackRank";
private const string MicrosoftVstsCommonPriority = "Microsoft.VSTS.Common.Priority";
private const string TargetWorkItemType = "User Story";
private const string Wiql = "SELECT [System.Id], [System.State], [System.Title], [System.AssignedTo], [System.WorkItemType], [Microsoft.VSTS.Common.Priority], " +
"[System.IterationPath], [System.AreaPath], [System.History], [System.Description] " +
"FROM WorkItems WHERE [System.TeamProject] = 'SourceProject' AND " +
"[System.State] = 'Active' " +
"ORDER BY [System.Id]";
private static WorkItemTypeCollection WorkItemTypes;
private static Dictionary<int, int> WorkItemIdMap = new Dictionary<int, int>();
#endregion
static void Main()
{
var createAreasAndIterations = GetRunMode();
var sourceWorkItemStore = GetSourceWorkItemStore();
var sourceWorkItems = sourceWorkItemStore.Query(Wiql);
var targetWorkItemStore = GetTargetWorkItemStore();
var targetProject = targetWorkItemStore.Projects[TargetProjectName];
WorkItemTypes = targetProject.WorkItemTypes;
foreach (WorkItem sourceWorkItem in sourceWorkItems)
{
if (createAreasAndIterations)
{
Console.WriteLine();
EnsureThatStructureExists(TargetProjectName, Areas, sourceWorkItem.AreaPath.Substring(sourceWorkItem.AreaPath.IndexOf("\\") + 1));
EnsureThatStructureExists(TargetProjectName, Iterations, sourceWorkItem.IterationPath.Substring(sourceWorkItem.IterationPath.IndexOf("\\") + 1));
}
else
{
MigrateWorkItem(sourceWorkItem);
}
}
if (!createAreasAndIterations)
{
var query = from WorkItem wi in sourceWorkItems where wi.Links.Count > 0 select wi;
foreach (WorkItem sourceWorkItem in query)
{
LinkRelatedItems(targetWorkItemStore, sourceWorkItem);
}
}
TextWriter tw = File.CreateText(@"C:\temp\TFS_MigratedItems.csv");
tw.WriteLine("SourceId,TargetId");
foreach (var entry in WorkItemIdMap)
{
tw.WriteLine(entry.Key + "," + entry.Value);
}
tw.Close();
Console.WriteLine();
Console.WriteLine("Done! Have a nice day.");
Console.ReadLine();
}
private static bool GetRunMode()
{
bool createAreasAndIterations;
while (true)
{
Console.Write("Create [A]reas/Iterations or [M]igrate (Ctrl-C to quit)?: ");
var command = Console.ReadLine().ToUpper().Trim();
if (command == "A")
{
createAreasAndIterations = true;
break;
}
if (command == "M")
{
createAreasAndIterations = false;
break;
}
Console.WriteLine("Unknown command " + command + " - try again.");
}
return createAreasAndIterations;
}
private static void MigrateWorkItem(WorkItem sourceWorkItem)
{
var targetWIT = WorkItemTypes[sourceWorkItem.Type.Name];
var newWorkItem = targetWIT.NewWorkItem();
//var newWorkItem = targetWorkItemType.NewWorkItem();
// Description (Task) / Steps to reproduce (Bug)
if (sourceWorkItem.Type.Name != "Bug")
{
newWorkItem.Description = sourceWorkItem.Description;
}
else
{
newWorkItem.Fields["Microsoft.VSTS.TCM.ReproSteps"].Value = sourceWorkItem.Description;
}
// History
newWorkItem.History = sourceWorkItem.History;
// Title
newWorkItem.Title = sourceWorkItem.Title;
// Assigned To
newWorkItem.Fields[CoreField.AssignedTo].Value = sourceWorkItem.Fields[CoreField.AssignedTo].Value;
// Stack Rank - Priority
newWorkItem.Fields[MicrosoftVstsCommonPriority].Value = sourceWorkItem.Fields[MicrosoftVstsCommonPriority].Value;
// Area Path
newWorkItem.AreaPath = FormatPath(TargetProjectName, sourceWorkItem.AreaPath);
// Iteration Path
newWorkItem.IterationPath = FormatPath(TargetProjectName, sourceWorkItem.IterationPath);
// Activity
if (sourceWorkItem.Type.Name == "Task")
{
newWorkItem.Fields["Microsoft.VSTS.Common.Activity"].Value = sourceWorkItem.Fields["Microsoft.VSTS.Common.Discipline"].Value;
}
// State
//newWorkItem.State = sourceWorkItem.State;
// Reason
//newWorkItem.Reason = sourceWorkItem.Reason;
// build a usable rendition of prior revision history
RevisionCollection revisions = sourceWorkItem.Revisions;
var query = from Revision r in revisions orderby r.Fields["Changed Date"].Value descending select r;
StringBuilder sb = new StringBuilder(String.Format("Migrated from work item {0}<BR />\n", sourceWorkItem.Id));
foreach (Revision revision in query)
{
String history = (String)revision.Fields["History"].Value;
if (!String.IsNullOrEmpty(history))
{
foreach (Field f in revision.Fields)
{
if (!Object.Equals(f.Value, f.OriginalValue))
{
if (f.Name == "History")
{
string notation = string.Empty;
if (revision.Fields["State"].OriginalValue != revision.Fields["State"].Value)
{
notation = String.Format("({0} to {1})", revision.Fields["State"].OriginalValue, revision.Fields["State"].Value);
}
//Console.WriteLine("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation);
sb.Append(String.Format("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}<BR />\n", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation));
}
}
}
//Console.WriteLine("Revision {0}: ", revision.Fields["Rev"].Value);
//Console.WriteLine(" ChangedDate: " + revision.Fields["ChangedDate"].Value);
//Console.WriteLine(" History: " + sb.ToString());
}
}
newWorkItem.History = sb.ToString();
// Attachments
for (var i = 0; i < sourceWorkItem.AttachedFileCount; i++)
{
CopyAttachment(sourceWorkItem.Attachments[i], newWorkItem);
}
// Validate before save
if (!newWorkItem.IsValid())
{
var reasons = newWorkItem.Validate();
Console.WriteLine(string.Format("Could not validate new work item (old id: {0}).", sourceWorkItem.Id));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
Console.Write("[" + sourceWorkItem.Id + "] " + newWorkItem.Title);
try
{
newWorkItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [saved: {0}]", newWorkItem.Id));
WorkItemIdMap.Add(sourceWorkItem.Id, newWorkItem.Id);
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
private static void CopyAttachment(Attachment attachment, WorkItem newWorkItem)
{
using (var client = new WebClient())
{
client.UseDefaultCredentials = true;
client.DownloadFile(attachment.Uri, attachment.Name);
var newAttachment = new Attachment(attachment.Name, attachment.Comment);
newWorkItem.Attachments.Add(newAttachment);
}
}
private static void LinkRelatedItems(WorkItemStore targetWorkItemStore, WorkItem sourceWorkItem)
{
int newId = WorkItemIdMap[sourceWorkItem.Id];
WorkItem targetItem = targetWorkItemStore.GetWorkItem(newId);
foreach (Link l in sourceWorkItem.Links)
{
if (l is RelatedLink)
{
RelatedLink sl = l as RelatedLink;
switch (sl.ArtifactLinkType.Name)
{
case "Related Workitem":
{
if (WorkItemIdMap.ContainsKey(sl.RelatedWorkItemId))
{
int RelatedWorkItemId = WorkItemIdMap[sl.RelatedWorkItemId];
RelatedLink rl = new RelatedLink(sl.LinkTypeEnd, RelatedWorkItemId);
// !!!!
// this does not work - need to check the existing links to see if one exists already for the linked workitem.
// using contains expects the same object and that's not what I'm doing here!!!!!!
//if (!targetItem.Links.Contains(rl))
// !!!!
var query = from RelatedLink qrl in targetItem.Links where qrl.RelatedWorkItemId == RelatedWorkItemId select qrl;
if (query.Count() == 0)
{
targetItem.Links.Add(rl);
// Validate before save
if (!targetItem.IsValid())
{
var reasons = targetItem.Validate();
Console.WriteLine(string.Format("Could not validate work item (old id: {0}) related link id {1}.", sourceWorkItem.Id, sl.RelatedWorkItemId));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
try
{
targetItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [Updated: {0}]", targetItem.Id));
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
}
break;
}
default:
{ break; }
}
}
}
}
private static void EnsureThatStructureExists(string projectName, string structureType, string structurePath)
{
var parts = structurePath.Split('\\');
var css = GetCommonStructureService();
var projectInfo = css.GetProjectFromName(projectName);
var parentNodeUri = GetCssStructure(GetCommonStructureService(), projectInfo.Uri, structureType).Uri;
var currentPath = FormatPath(projectName, structureType == Areas ? "Area" : "Iteration");
foreach (var part in parts)
{
currentPath = FormatPath(currentPath, part);
Console.Write(currentPath);
try
{
var currentNode = css.GetNodeFromPath(currentPath);
parentNodeUri = currentNode.Uri;
Console.ForegroundColor = ConsoleColor.DarkGreen;
Console.WriteLine(" [found]");
}
catch
{
parentNodeUri = css.CreateNode(part, parentNodeUri);
Console.ForegroundColor = ConsoleColor.Green;
Console.WriteLine(" [created]");
}
Console.ResetColor();
}
}
private static string FormatPath(string currentPath, string part)
{
part = part.Substring(part.IndexOf("\\") + 1);
currentPath = string.Format(@"{0}\{1}", currentPath, part);
return currentPath;
}
private static TfsTeamProjectCollection GetProjectCollection(Uri uri)
{
TfsTeamProjectCollection collection;
if (!Collections.TryGetValue(uri, out collection))
{
collection = TfsTeamProjectCollectionFactory.GetTeamProjectCollection(uri);
collection.Connect(ConnectOptions.IncludeServices);
collection.Authenticate();
Collections.Add(uri, collection);
}
return Collections[uri];
}
private static WorkItemStore GetSourceWorkItemStore()
{
var collection = GetProjectCollection(SourceCollectionUri);
return collection.GetService<WorkItemStore>();
}
private static WorkItemStore GetTargetWorkItemStore()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<WorkItemStore>();
}
public static NodeInfo GetCssStructure(ICommonStructureService css, String projectUri, String structureType)
{
return css.ListStructures(projectUri).FirstOrDefault(node => node.StructureType == structureType);
}
private static ICommonStructureService GetCommonStructureService()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<ICommonStructureService>();
}
}
}
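A usage note, as implied by GetRunMode(): run the program twice. Answer A first to create the target areas and iterations, then M to migrate the items; the SourceId/TargetId pairs are written to C:\temp\TFS_MigratedItems.csv when it finishes.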
