Crawler4j With Grails App - grails

I am building a crawler application in Grails (Groovy). I am using Crawler4j and following this tutorial.
I created a new Grails project.
I put the BasicCrawlController.groovy file under grails-app/controllers in my package.
I did not create any view, because I expected that on run-app my crawled data would appear in my crawlStorageFolder (please correct me if my understanding is flawed).
After that I ran the application with run-app, but I didn't see any crawled data anywhere.
Am I right in expecting some files to be created at the crawlStorageFolder location I have given, C:/crawl/crawler4jStorage?
Do I need to create any view for this?
If I want to invoke this crawler controller from some other view, on click of a form's submit button, can I just write <g:form name="submitWebsite" url="[controller:'BasicCrawlController ']">?
I ask because I do not have any action method in this controller, so is this the right way to invoke it?
My code is as follows:
//All necessary imports
public class BasicCrawlController {

    static main(args) throws Exception {
        String crawlStorageFolder = "C:/crawl/crawler4jStorage";
        int numberOfCrawlers = 1;
        //int maxDepthOfCrawling = -1; default

        CrawlConfig config = new CrawlConfig();
        config.setCrawlStorageFolder(crawlStorageFolder);
        config.setPolitenessDelay(1000);
        config.setMaxPagesToFetch(100);
        config.setResumableCrawling(false);

        PageFetcher pageFetcher = new PageFetcher(config);
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
        CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);

        controller.addSeed("http://en.wikipedia.org/wiki/Web_crawler")
        controller.start(BasicCrawler.class, 1);
    }
}
class BasicCrawler extends WebCrawler {

    final static Pattern FILTERS = Pattern.compile(
        ".*(\\.(css|js|bmp|gif|jpe?g" + "|png|tiff?|mid|mp2|mp3|mp4" +
        "|wav|avi|mov|mpeg|ram|m4v|pdf" + "|rm|smil|wmv|swf|wma|zip|rar|gz))\$")

    /**
     * You should implement this function to specify whether the given url
     * should be crawled or not (based on your crawling logic).
     */
    @Override
    boolean shouldVisit(WebURL url) {
        String href = url.getURL().toLowerCase()
        !FILTERS.matcher(href).matches() && href.startsWith("http://en.wikipedia.org/wiki/Web_crawler/")
    }

    /**
     * This function is called when a page is fetched and ready to be processed
     * by your program.
     */
    @Override
    void visit(Page page) {
        int docid = page.getWebURL().getDocid()
        String url = page.getWebURL().getURL()
        String domain = page.getWebURL().getDomain()
        String path = page.getWebURL().getPath()
        String subDomain = page.getWebURL().getSubDomain()
        String parentUrl = page.getWebURL().getParentUrl()
        String anchor = page.getWebURL().getAnchor()

        println("Docid: ${docid} ")
        println("URL: ${url} ")
        println("Domain: '${domain}'")
        println("Sub-domain: ' ${subDomain}'")
        println("Path: '${path}'")
        println("Parent page:${parentUrl} ")
        println("Anchor text: ${anchor} ")

        if (page.getParseData() instanceof HtmlParseData) {
            HtmlParseData htmlParseData = (HtmlParseData) page.getParseData()
            String text = htmlParseData.getText()
            String html = htmlParseData.getHtml()
            List<WebURL> links = htmlParseData.getOutgoingUrls()

            println("Text length: " + text.length())
            println("Html length: " + html.length())
            println("Number of outgoing links: " + links.size())
        }

        Header[] responseHeaders = page.getFetchResponseHeaders()
        if (responseHeaders != null) {
            println("Response headers:")
            for (Header header : responseHeaders) {
                println("\t ${header.getName()} : ${header.getValue()}")
            }
        }

        println("=============")
    }
}

I'll try to translate your code into the standard Grails structure.
Put this under grails-app/controllers:
class BasicCrawlController {

    def index() {
        String crawlStorageFolder = "C:/crawl/crawler4jStorage";
        int numberOfCrawlers = 1;
        //int maxDepthOfCrawling = -1; default

        CrawlConfig crawlConfig = new CrawlConfig();
        crawlConfig.setCrawlStorageFolder(crawlStorageFolder);
        crawlConfig.setPolitenessDelay(1000);
        crawlConfig.setMaxPagesToFetch(100);
        crawlConfig.setResumableCrawling(false);

        PageFetcher pageFetcher = new PageFetcher(crawlConfig);
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
        CrawlController controller = new CrawlController(crawlConfig, pageFetcher, robotstxtServer);

        controller.addSeed("http://en.wikipedia.org/wiki/Web_crawler")
        controller.start(BasicCrawler.class, 1);

        render "done crawling"
    }
}
Put this under src/groovy:
class BasicCrawler extends WebCrawler {

    final static Pattern FILTERS = Pattern.compile(
        ".*(\\.(css|js|bmp|gif|jpe?g" + "|png|tiff?|mid|mp2|mp3|mp4" +
        "|wav|avi|mov|mpeg|ram|m4v|pdf" + "|rm|smil|wmv|swf|wma|zip|rar|gz))\$")

    /**
     * You should implement this function to specify whether the given url
     * should be crawled or not (based on your crawling logic).
     */
    @Override
    boolean shouldVisit(WebURL url) {
        String href = url.getURL().toLowerCase()
        !FILTERS.matcher(href).matches() && href.startsWith("http://en.wikipedia.org/wiki/Web_crawler/")
    }

    /**
     * This function is called when a page is fetched and ready to be processed
     * by your program.
     */
    @Override
    void visit(Page page) {
        int docid = page.getWebURL().getDocid()
        String url = page.getWebURL().getURL()
        String domain = page.getWebURL().getDomain()
        String path = page.getWebURL().getPath()
        String subDomain = page.getWebURL().getSubDomain()
        String parentUrl = page.getWebURL().getParentUrl()
        String anchor = page.getWebURL().getAnchor()

        println("Docid: ${docid} ")
        println("URL: ${url} ")
        println("Domain: '${domain}'")
        println("Sub-domain: ' ${subDomain}'")
        println("Path: '${path}'")
        println("Parent page:${parentUrl} ")
        println("Anchor text: ${anchor} ")

        if (page.getParseData() instanceof HtmlParseData) {
            HtmlParseData htmlParseData = (HtmlParseData) page.getParseData()
            String text = htmlParseData.getText()
            String html = htmlParseData.getHtml()
            List<WebURL> links = htmlParseData.getOutgoingUrls()

            println("Text length: " + text.length())
            println("Html length: " + html.length())
            println("Number of outgoing links: " + links.size())
        }

        Header[] responseHeaders = page.getFetchResponseHeaders()
        if (responseHeaders != null) {
            println("Response headers:")
            for (Header header : responseHeaders) {
                println("\t ${header.getName()} : ${header.getValue()}")
            }
        }

        println("=============")
    }
}
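On the <g:form> part of the question: you can trigger the crawl from a view, but the form should use the logical controller name (without the Controller suffix) and point at an action. A minimal GSP sketch, assuming the controller above with its index action:
<g:form name="submitWebsite" controller="basicCrawl" action="index">
    <g:submitButton name="crawl" value="Start crawling"/>
</g:form>
Keep in mind the request will block until controller.start(...) returns, because the crawl runs synchronously inside the action.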

Related

Mp4 cannot play on iOS by request spring mvc server resource, but working on Nginx access mp4 file directly

I have two scenarios when playing an mp4 file on iOS devices.
MP4 access via Nginx works:
I put the mp4 file in /html and use the following configuration; iOS devices and the Chrome browser can then play the mp4 files properly.
server {
    listen 127.0.0.1:80 default_server;
    location ~ ^/storage\/*.mp4 {
        root html;
    }
}
MP4 access via Tomcat Spring MVC does not work: when I request the file through a Spring MVC REST API that returns a ResponseEntity containing a Resource object, the Chrome browser receives and plays the mp4 properly, but iOS devices do not.
@GetMapping(value = "/storage/{filename:.+}")
@ResponseBody
public ResponseEntity<org.springframework.core.io.Resource> accessStorageFile(HttpServletResponse response, @PathVariable String filename) throws IOException {
org.springframework.core.io.Resource resource = storageUtil.loadAsResource(filename);
InputStream inputStream = resource.getInputStream();
InputStreamResource inputStreamResource = new InputStreamResource(inputStream);
String contentType = FileTypeMap.getDefaultFileTypeMap().getContentType(resource.getFile());
contentType = resource.getFilename().contains(".mp4") ? "video/mp4" : contentType;
response.setContentType(contentType);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentType(MediaType.valueOf(contentType));
responseHeaders.setContentLength(resource.getFile().length());
return new ResponseEntity<>(inputStreamResource, responseHeaders, HttpStatus.OK);
}
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.util.StringUtils;
/**
 * @author David 1
 */
public class MultipartFileSender {
private static final int DEFAULT_BUFFER_SIZE = 20480; // ..bytes = 20KB.
private static final long DEFAULT_EXPIRE_TIME = 604800000L; // ..ms = 1 week.
private static final String MULTIPART_BOUNDARY = "MULTIPART_BYTERANGES";
Path filepath;
HttpServletRequest request;
HttpServletResponse response;
public MultipartFileSender() {
}
public static MultipartFileSender fromPath(Path path) {
return new MultipartFileSender().setFilepath(path);
}
public static MultipartFileSender fromFile(File file) {
return new MultipartFileSender().setFilepath(file.toPath());
}
public static MultipartFileSender fromURIString(String uri) {
return new MultipartFileSender().setFilepath(Paths.get(uri));
}
//** internal setter **//
private MultipartFileSender setFilepath(Path filepath) {
this.filepath = filepath;
return this;
}
public MultipartFileSender with(HttpServletRequest httpRequest) {
request = httpRequest;
return this;
}
public MultipartFileSender with(HttpServletResponse httpResponse) {
response = httpResponse;
return this;
}
public void serveResource() throws Exception {
if (response == null || request == null) {
return;
}
if (!Files.exists(filepath)) {
System.out.println("File doesn't exist at URI : {" + filepath.toAbsolutePath().toString() + "}");
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
Long length = Files.size(filepath);
String fileName = filepath.getFileName().toString();
FileTime lastModifiedObj = Files.getLastModifiedTime(filepath);
if (StringUtils.isEmpty(fileName) || lastModifiedObj == null) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
long lastModified = LocalDateTime.ofInstant(lastModifiedObj.toInstant(),
ZoneId.of(ZoneOffset.systemDefault().getId())).toEpochSecond(ZoneOffset.UTC);
String contentType = "video/mp4";
// Validate request headers for caching ---------------------------------------------------
// If-None-Match header should contain "*" or ETag. If so, then return 304.
String ifNoneMatch = request.getHeader("If-None-Match");
if (ifNoneMatch != null && HttpUtils.matches(ifNoneMatch, fileName)) {
response.setHeader("ETag", fileName); // Required in 304.
response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
return;
}
// If-Modified-Since header should be greater than LastModified. If so, then return 304.
// This header is ignored if any If-None-Match header is specified.
long ifModifiedSince = request.getDateHeader("If-Modified-Since");
if (ifNoneMatch == null && ifModifiedSince != -1 && ifModifiedSince + 1000 > lastModified) {
response.setHeader("ETag", fileName); // Required in 304.
response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
return;
}
// Validate request headers for resume ----------------------------------------------------
// If-Match header should contain "*" or ETag. If not, then return 412.
String ifMatch = request.getHeader("If-Match");
if (ifMatch != null && !HttpUtils.matches(ifMatch, fileName)) {
response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
return;
}
// If-Unmodified-Since header should be greater than LastModified. If not, then return 412.
long ifUnmodifiedSince = request.getDateHeader("If-Unmodified-Since");
if (ifUnmodifiedSince != -1 && ifUnmodifiedSince + 1000 <= lastModified) {
response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
return;
}
// Validate and process range -------------------------------------------------------------
// Prepare some variables. The full Range represents the complete file.
Range full = new Range(0, length - 1, length);
List<Range> ranges = new ArrayList<>();
// Validate and process Range and If-Range headers.
String range = request.getHeader("Range");
if (range != null) {
// Range header should match format "bytes=n-n,n-n,n-n...". If not, then return 416.
if (!range.matches("^bytes=\\d*-\\d*(,\\d*-\\d*)*$")) {
response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
return;
}
String ifRange = request.getHeader("If-Range");
if (ifRange != null && !ifRange.equals(fileName)) {
try {
long ifRangeTime = request.getDateHeader("If-Range"); // Throws IAE if invalid.
if (ifRangeTime != -1) {
ranges.add(full);
}
} catch (IllegalArgumentException ignore) {
ranges.add(full);
}
}
// If any valid If-Range header, then process each part of byte range.
if (ranges.isEmpty()) {
for (String part : range.substring(6).split(",")) {
// Assuming a file with length of 100, the following examples returns bytes at:
// 50-80 (50 to 80), 40- (40 to length=100), -20 (length-20=80 to length=100).
long start = Range.sublong(part, 0, part.indexOf("-"));
long end = Range.sublong(part, part.indexOf("-") + 1, part.length());
if (start == -1) {
start = length - end;
end = length - 1;
} else if (end == -1 || end > length - 1) {
end = length - 1;
}
// Check if Range is syntactically valid. If not, then return 416.
if (start > end) {
response.setHeader("Content-Range", "bytes */" + length); // Required in 416.
response.sendError(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE);
return;
}
// Add range.
ranges.add(new Range(start, end, length));
}
}
}
// Prepare and initialize response --------------------------------------------------------
// Get content type by file name and set content disposition.
String disposition = "inline";
// If content type is unknown, then set the default value.
// For all content types, see: http://www.w3schools.com/media/media_mimeref.asp
// To add new content types, add new mime-mapping entry in web.xml.
if (contentType == null) {
contentType = "application/octet-stream";
} else if (!contentType.startsWith("image")) {
// Else, expect for images, determine content disposition. If content type is supported by
// the browser, then set to inline, else attachment which will pop a 'save as' dialogue.
String accept = request.getHeader("Accept");
disposition = accept != null && HttpUtils.accepts(accept, contentType) ? "inline" : "attachment";
}
System.out.println("Content-Type : {" + contentType + "}");
// Initialize response.
response.reset();
response.setBufferSize(DEFAULT_BUFFER_SIZE);
response.setHeader("Content-Type", contentType);
response.setHeader("Content-Disposition", disposition + ";filename=\"" + fileName + "\"");
System.out.println("Content-Disposition : {" + disposition + "}");
response.setHeader("Accept-Ranges", "bytes");
response.setHeader("ETag", fileName);
response.setDateHeader("Last-Modified", lastModified);
response.setDateHeader("Expires", System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);
// Send requested file (part(s)) to client ------------------------------------------------
// Prepare streams.
try (InputStream input = new BufferedInputStream(Files.newInputStream(filepath));
OutputStream output = response.getOutputStream()) {
if (ranges.isEmpty() || ranges.get(0) == full) {
// Return full file.
System.out.println("Return full file");
response.setContentType(contentType);
response.setHeader("Content-Range", "bytes " + full.start + "-" + full.end + "/" + full.total);
response.setHeader("Content-Length", String.valueOf(full.length));
Range.copy(input, output, length, full.start, full.length);
} else if (ranges.size() == 1) {
// Return single part of file.
Range r = ranges.get(0);
System.out.println("Return 1 part of file : from ({" + r.start + "}) to ({" + r.end + "})");
response.setContentType(contentType);
response.setHeader("Content-Range", "bytes " + r.start + "-" + r.end + "/" + r.total);
response.setHeader("Content-Length", String.valueOf(r.length));
response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.
// Copy single part range.
Range.copy(input, output, length, r.start, r.length);
} else {
// Return multiple parts of file.
response.setContentType("multipart/byteranges; boundary=" + MULTIPART_BOUNDARY);
response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.
// Cast back to ServletOutputStream to get the easy println methods.
ServletOutputStream sos = (ServletOutputStream) output;
// Copy multi part range.
for (Range r : ranges) {
System.out.println("Return multi part of file : from ({" + r.start + "}) to ({" + r.end + "})");
// Add multipart boundary and header fields for every range.
sos.println();
sos.println("--" + MULTIPART_BOUNDARY);
sos.println("Content-Type: " + contentType);
sos.println("Content-Range: bytes " + r.start + "-" + r.end + "/" + r.total);
// Copy single part range of multi part range.
Range.copy(input, output, length, r.start, r.length);
}
// End with multipart boundary.
sos.println();
sos.println("--" + MULTIPART_BOUNDARY + "--");
}
}
}
private static class Range {
long start;
long end;
long length;
long total;
/**
* Construct a byte range.
*
* @param start Start of the byte range.
* @param end End of the byte range.
* @param total Total length of the byte source.
*/
public Range(long start, long end, long total) {
this.start = start;
this.end = end;
this.length = end - start + 1;
this.total = total;
}
public static long sublong(String value, int beginIndex, int endIndex) {
String substring = value.substring(beginIndex, endIndex);
return (substring.length() > 0) ? Long.parseLong(substring) : -1;
}
private static void copy(InputStream input, OutputStream output, long inputSize, long start, long length) throws IOException {
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int read;
if (inputSize == length) {
// Write full range.
while ((read = input.read(buffer)) > 0) {
output.write(buffer, 0, read);
output.flush();
}
} else {
input.skip(start);
long toRead = length;
while ((read = input.read(buffer)) > 0) {
if ((toRead -= read) > 0) {
output.write(buffer, 0, read);
output.flush();
} else {
output.write(buffer, 0, (int) toRead + read);
output.flush();
break;
}
}
}
}
}
private static class HttpUtils {
/**
* Returns true if the given accept header accepts the given value.
*
* @param acceptHeader The accept header.
* @param toAccept The value to be accepted.
* @return True if the given accept header accepts the given value.
*/
public static boolean accepts(String acceptHeader, String toAccept) {
String[] acceptValues = acceptHeader.split("\\s*(,|;)\\s*");
Arrays.sort(acceptValues);
return Arrays.binarySearch(acceptValues, toAccept) > -1
|| Arrays.binarySearch(acceptValues, toAccept.replaceAll("/.*$", "/*")) > -1
|| Arrays.binarySearch(acceptValues, "*/*") > -1;
}
/**
* Returns true if the given match header matches the given value.
*
* @param matchHeader The match header.
* @param toMatch The value to be matched.
* @return True if the given match header matches the given value.
*/
public static boolean matches(String matchHeader, String toMatch) {
String[] matchValues = matchHeader.split("\\s*,\\s*");
Arrays.sort(matchValues);
return Arrays.binarySearch(matchValues, toMatch) > -1
|| Arrays.binarySearch(matchValues, "*") > -1;
}
} }
Use it in your controller, and make sure you are not using @RestController; use @Controller and serve the file from a void handler method like the one below. The point is that iOS (Safari) will only play the video if the server honors HTTP Range requests and answers with 206 Partial Content, which this class does and the plain ResponseEntity approach does not.
@RequestMapping(method = RequestMethod.GET, value = "/getFile")
public void getFile(HttpServletRequest httpRequest, HttpServletResponse httpResponse) throws Exception {
    MultipartFileSender.fromPath(Paths.get("D:\\8561523973120206.mp4"))
            .with(httpRequest)
            .with(httpResponse)
            .serveResource();
}
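As a rough sketch of how the question's original /storage/{filename} mapping could delegate to this class instead (assuming the storageUtil.loadAsResource helper from the question resolves to a file on disk):
@GetMapping("/storage/{filename:.+}")
public void streamVideo(@PathVariable String filename,
                        HttpServletRequest request,
                        HttpServletResponse response) throws Exception {
    // Resolve the file through the question's storageUtil, then let
    // MultipartFileSender handle Range requests / 206 responses for iOS.
    Path videoPath = storageUtil.loadAsResource(filename).getFile().toPath();
    MultipartFileSender.fromPath(videoPath)
            .with(request)
            .with(response)
            .serveResource();
}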

ASP.NET MVC Open a .xls file

I have to create a .xls file from the data displayed in a table on my page. This happens when the user clicks the 'export' button. I have the following code for it, and the file is created okay. Now I want to open this file on the same click. How should I open it for the user to see?
string filePath = "C:/Upload/Stats_" + viewModel.SelectedTest.ToString() + ".xls";
//write the header
using (var pw = new StreamWriter(filePath, true))
{
pw.WriteLine(String.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}", "Month", "Total Users", "K",
"T", "G", "Detail", "GS",
"BI", "GHF","A"));
//write to the file
for (int i = 0; i < 12; i++)
{
pw.WriteLine(
String.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}", viewModel.Months[i],
viewModel.M[i], viewModel.MKau[i],
viewModel.MTech[i], viewModel.MGew[i],
viewModel.MDet[i], viewModel.MGes[i],
viewModel.MBea[i], viewModel.MGesHf[i],viewModel.MAug[i]));
pw.Flush();
}
pw.Close();
}
Here I would like to open it.
I had the same 'requirement' to export to xls, and instead I gave the client an export to CSV, which opens in Excel on a machine with Excel installed but is also usable on other systems. I achieved it like this.
Create an extension method that supports .AsCsv, which was taken largely from Mike Hadlow's implementation
public static class EnumerableExtensions
{
public static string AsCsv<T>(this IEnumerable<T> items) where T : class
{
var csvBuilder = new StringBuilder();
var properties = typeof(T).GetProperties();
foreach (T item in items)
{
string line = string.Join(",", properties.Select(p => p.GetValue(item, null).ToCsvValue()).ToArray());
csvBuilder.AppendLine(line);
}
return csvBuilder.ToString();
}
private static string ToCsvValue<T>(this T item)
{
if (item is string)
{
return string.Format("\"{0}\"", item.ToString().Replace("\"", "\\\""));
}
double dummy;
if (double.TryParse(item.ToString(), out dummy))
{
return string.Format("{0}", item.ToString());
}
return string.Format("\"{0}\"", item.ToString());
}
}
Then you would need to set your controller method to return a FileContentResult and have some logic like this within the controller method:
var outputModel = viewModel.ToList().Select(model => new
{
    Months = model.Months,
    M = model.M,
    MKau = model.MKau,
    MTech = model.MTech,
    MGew = model.MGew,
    MDet = model.MDet,
    MGes = model.MGes,
    MBea = model.MBea,
    MGesHf = model.MGesHf,
    MAug = model.MAug // added so the row matches the 10-column header below
});
string csv = "\"Month\",\"Total Users\",\"K\",\"T\",\"G\",\"Detail\",\"GS\",\"BI\",\"GHF\",\"A\""
+ System.Environment.NewLine
+ outputModel.AsCsv();
string fileName = "Stats_" + viewModel.SelectedTest.ToString() + ".csv";
return this.File(new System.Text.UTF8Encoding().GetBytes(csv), "text/csv", fileName);

Issues with Crypto.generateMac() in SalesForce APEX

We need to make a few callouts to a service that uses OAuth 1.0 and requires each request to be signed with HMAC-SHA1.
The service doesn't have any Apex client API, so we have to do it manually.
Unfortunately,
EncodingUtil.base64Encode(Crypto.generateMac('hmacSHA1', Blob.valueOf(data), Blob.valueOf(key)));
returns a different string from what we expect. We compared the output for the same input against libraries in other languages, and the output was different.
I have no problems calling out to OAuth 1.0. Here's some sample Apex for signing your request:
EDIT: Added additional code
private Map<String,String> getUrlParams(String value)
{
Map<String,String> res = new Map<String,String>();
if(value==null || value=='')
{
return res;
}
for(String s : value.split('&'))
{
List<String> kv = s.split('=');
if(kv.size()>1)
{
res.put(kv[0],kv[1]);
}
}
return res;
}
private String createBaseString(Map<String,String> oauthParams, HttpRequest req)
{
Map<String,String> p = oauthParams.clone();
if(req.getMethod().equalsIgnoreCase('post') && req.getBody()!=null && req.getHeader('Content-Type')=='application/x-www-form-urlencoded')
p.putAll(getUrlParams(req.getBody()));
String host = req.getEndpoint();
Integer n = host.indexOf('?');
if(n > -1)
{
p.putAll(getUrlParams(host.substring(n+1)));
host = host.substring(0,n);
}
List<String> keys = new List<String>();
keys.addAll(p.keySet());
keys.sort();
String s = keys.get(0)+'='+p.get(keys.get(0));
for(Integer i=1; i<keys.size(); i++)
s = s + '&' + keys.get(i) + '=' + p.get(keys.get(i));
return req.getMethod().toUpperCase() + '&' + EncodingUtil.urlEncode(host, 'UTF-8') + '&' + EncodingUtil.urlEncode(s, 'UTF-8');
}
public void sign(HttpRequest req)
{
nonce = String.valueOf(Crypto.getRandomLong());
timestamp = String.valueOf(DateTime.now().getTime() / 1000);
refreshParameters();
String s = createBaseString(parameters, req);
Blob sig = Crypto.generateMac('HmacSHA1', Blob.valueOf(s),
Blob.valueOf(consumerSecret+'&'+ (tokenSecret!=null ? tokenSecret : '')));
signature = EncodingUtil.urlEncode(EncodingUtil.base64encode(sig), 'UTF-8');
String header = 'OAuth ';
for (String key : parameters.keySet())
{
header = header + key + '="'+parameters.get(key)+'", ';
}
header = header + 'oauth_signature="'+signature+'"';
req.setHeader('Authorization',header);
}
This might be reaching, but could there be a case-sensitivity issue? Notice I'm calling 'HmacSHA1', not 'hmacSHA1'.
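A minimal sketch of that suggestion applied to the call from the question (only the casing of the algorithm name changes):
// 'HmacSHA1' rather than 'hmacSHA1'
String signature = EncodingUtil.base64Encode(
    Crypto.generateMac('HmacSHA1', Blob.valueOf(data), Blob.valueOf(key)));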

entered values are being cleared in gsp

I am entering values (URLs) in the text box, and when I click the next button, validation runs to check whether the entered URL values are valid or not.
If the URLs are not valid, then I display the error as below:
if(valid == false){
this.errors.reject("Incorrect URL is entered for "+countries.get(countryCode)+" - please ensure to use a correct URL for More Games.")
return [];
}
Once the error is shown, it clears the entered values and I need to enter them again. How can I prevent the values from being cleared (I think the page is getting loaded again), or prevent the page from reloading?
Here is the command:
class DeliveryMoreGamesURLCommand implements Command {
private static final LOG = LogFactory.getLog(DeliveryController.class)
def wrapperService = ApplicationHolder.application.getMainContext().getBean("wrapperService");
def customerService = ApplicationHolder.application.getMainContext().getBean("customerService");
def countryService = ApplicationHolder.application.getMainContext().getBean("countryService");
def helperService = ApplicationHolder.application.getMainContext().getBean("helperService");
def ascService = ApplicationHolder.application.getMainContext().getBean("ascService");
Hashtable<String, String> countries;
public LinkedHashMap<String, Object> preProcess(sessionObject, params, request) {
Delivery delivery = (Delivery) sessionObject;
def customer = Customer.get(delivery.customerId);
def contentProvider = ContentProvider.get(delivery.contentProviderId);
def deployment = Deployment.get(delivery.deploymentId);
def operatingSystem = OperatingSystem.get(delivery.operatingSystemId);
countries = wrapperService.getCountries(deployment, operatingSystem);
def sortedCountries = countries.sort { a, b -> a.value <=> b.value };
def urls = ascService.getMoreGamesUrlsPerTemplates(deployment, operatingSystem);
def moreGamesUrls = new Hashtable<String,String>();
countries.each { country ->
String countryCode = countryService.getTwoLetterCountryAbbreviation(country.key);
String url = customerService.getMoreGamesUrl(customer, contentProvider, countryCode);
if ("".equals(url)) {
url = urls.get(country.key);
if (url == null) {
url = "";
}
}
moreGamesUrls.put(country.key, url); // We need to use the existing country code if the channels are deployed with three letter country codes
}
return [command: this, countries: sortedCountries, moreGamesUrls: moreGamesUrls]
}
public LinkedHashMap<String, Object> postProcess(sessionObject, params, request) {
Delivery delivery = (Delivery) sessionObject;
def urls = params.gamesUrls;
LOG.debug("urls from gsp :"+urls)
try{
urls.eachWithIndex { u, i ->
String countryCode = u.key;
String url = urls["${u.key}"]
if(url != ""){
Boolean valid = helperService.isURLValid(url)
if(valid == false){
this.errors.reject("Incorrect URL is entered for "+countries.get(countryCode)+" - please ensure to use a correct URL for More Games.")
return [];
}
}
}
}catch (Exception ex) {
LOG.warn("Incorrect URL is entered", ex)
return [];
}
def moreGamesUrls = new Hashtable<String, String>();
urls.eachWithIndex { u, i ->
String countryCode = u.key;
String url = urls["${u.key}"]
moreGamesUrls.put(countryCode, url);
}
delivery.countries = countries;
delivery.moreGamesUrls = moreGamesUrls;
LOG.debug("moreGamesUrls after edit=${delivery.moreGamesUrls}");
return null;
}
}
From the command's preProcess the data is rendered, and after clicking the next button, postProcess is invoked along with the URL validation.
I resolved this issue by making moreGamesUrls a class-level (global) variable, which keeps the values even after the error is thrown.
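For illustration, a rough sketch of that change against the command shown above; exactly how the stored values get handed back to the GSP is my assumption, not something spelled out in the resolution:
class DeliveryMoreGamesURLCommand implements Command {

    // Promoted from a local variable in postProcess to a class-level field,
    // so the URLs the user typed survive a failed validation round trip.
    Hashtable<String, String> moreGamesUrls = new Hashtable<String, String>()

    public LinkedHashMap<String, Object> postProcess(sessionObject, params, request) {
        params.gamesUrls.each { key, value -> moreGamesUrls.put(key, value) }
        // ...validation as in the original postProcess; because moreGamesUrls
        // is now a field, the error path can return it to the view so the
        // text boxes are re-populated instead of coming back empty.
        return [command: this, moreGamesUrls: moreGamesUrls]
    }
}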

Moving a work item between projects in TFS

Is it possible to move a work item from one project to another inside TFS? I've seen a copy option, but no move. Also, if it is possible, what's the implication for any of the WI history?
I found this article from 2008 that seems to say it's not possible, but I wondered if there'd been any progress since then.
It isn't possible to move, just copy. The way we do it is: make the copy, link the original, then close the original as obsolete. You could also create the copy and TF Destroy the original, but you will lose all history.
If you wanted to, you could get very fancy and create your own "move" utility that copies the workitem and all of the history, then closes out (or destroys) the old one. Seems like overkill for something that you probably shouldn't need to do all that often.
Lars Wilhelmsen wrote a WorkItemMigrator -> http://larsw.codeplex.com/SourceControl/list/changesets
It is a good starting point for a utility you can customize for your needs. We used it to split off 100 or so work items to a new project. Here's the program I ended up with. Modify the query to subset the items to migrate.
namespace WorkItemMigrator
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using Microsoft.TeamFoundation.Client;
using Microsoft.TeamFoundation.Framework.Common;
using Microsoft.TeamFoundation.Server;
using Microsoft.TeamFoundation.WorkItemTracking.Client;
class Program
{
#region Members
private static readonly Dictionary<Uri, TfsTeamProjectCollection> Collections = new Dictionary<Uri, TfsTeamProjectCollection>();
private static readonly Uri SourceCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private static readonly Uri TargetCollectionUri = new Uri("http://your.domain.com:8080/tfs/DefaultCollection");
private const String Areas = "ProjectModelHierarchy";
private const string Iterations = "ProjectLifecycle";
private const string TargetProjectName = "TargetProject";
private const string MicrosoftVstsCommonStackRankFieldName = "Microsoft.VSTS.Common.StackRank";
private const string MicrosoftVstsCommonPriority = "Microsoft.VSTS.Common.Priority";
private const string TargetWorkItemType = "User Story";
private const string Wiql = "SELECT [System.Id], [System.State], [System.Title], [System.AssignedTo], [System.WorkItemType], [Microsoft.VSTS.Common.Priority], " +
"[System.IterationPath], [System.AreaPath], [System.History], [System.Description] " +
"FROM WorkItems WHERE [System.TeamProject] = 'SourceProject' AND " +
"[System.State] = 'Active' " +
"ORDER BY [System.Id]";
private static WorkItemTypeCollection WorkItemTypes;
private static Dictionary<int, int> WorkItemIdMap = new Dictionary<int, int>();
#endregion
static void Main()
{
var createAreasAndIterations = GetRunMode();
var sourceWorkItemStore = GetSourceWorkItemStore();
var sourceWorkItems = sourceWorkItemStore.Query(Wiql);
var targetWorkItemStore = GetTargetWorkItemStore();
var targetProject = targetWorkItemStore.Projects[TargetProjectName];
WorkItemTypes = targetProject.WorkItemTypes;
foreach (WorkItem sourceWorkItem in sourceWorkItems)
{
if (createAreasAndIterations)
{
Console.WriteLine();
EnsureThatStructureExists(TargetProjectName, Areas, sourceWorkItem.AreaPath.Substring(sourceWorkItem.AreaPath.IndexOf("\\") + 1));
EnsureThatStructureExists(TargetProjectName, Iterations, sourceWorkItem.IterationPath.Substring(sourceWorkItem.IterationPath.IndexOf("\\") + 1));
}
else
{
MigrateWorkItem(sourceWorkItem);
}
}
if (!createAreasAndIterations)
{
var query = from WorkItem wi in sourceWorkItems where wi.Links.Count > 0 select wi;
foreach (WorkItem sourceWorkItem in query)
{
LinkRelatedItems(targetWorkItemStore, sourceWorkItem);
}
}
TextWriter tw = File.CreateText(@"C:\temp\TFS_MigratedItems.csv");
tw.WriteLine("SourceId,TargetId");
foreach (var entry in WorkItemIdMap)
{
tw.WriteLine(entry.Key + "," + entry.Value);
}
tw.Close();
Console.WriteLine();
Console.WriteLine("Done! Have a nice day.");
Console.ReadLine();
}
private static bool GetRunMode()
{
bool createAreasAndIterations;
while (true)
{
Console.Write("Create [A]reas/Iterations or [M]igrate (Ctrl-C to quit)?: ");
var command = Console.ReadLine().ToUpper().Trim();
if (command == "A")
{
createAreasAndIterations = true;
break;
}
if (command == "M")
{
createAreasAndIterations = false;
break;
}
Console.WriteLine("Unknown command " + command + " - try again.");
}
return createAreasAndIterations;
}
private static void MigrateWorkItem(WorkItem sourceWorkItem)
{
var targetWIT = WorkItemTypes[sourceWorkItem.Type.Name];
var newWorkItem = targetWIT.NewWorkItem();
//var newWorkItem = targetWorkItemType.NewWorkItem();
// Description (Task) / Steps to reproduce (Bug)
if (sourceWorkItem.Type.Name != "Bug")
{
newWorkItem.Description = sourceWorkItem.Description;
}
else
{
newWorkItem.Fields["Microsoft.VSTS.TCM.ReproSteps"].Value = sourceWorkItem.Description;
}
// History
newWorkItem.History = sourceWorkItem.History;
// Title
newWorkItem.Title = sourceWorkItem.Title;
// Assigned To
newWorkItem.Fields[CoreField.AssignedTo].Value = sourceWorkItem.Fields[CoreField.AssignedTo].Value;
// Stack Rank - Priority
newWorkItem.Fields[MicrosoftVstsCommonPriority].Value = sourceWorkItem.Fields[MicrosoftVstsCommonPriority].Value;
// Area Path
newWorkItem.AreaPath = FormatPath(TargetProjectName, sourceWorkItem.AreaPath);
// Iteration Path
newWorkItem.IterationPath = FormatPath(TargetProjectName, sourceWorkItem.IterationPath);
// Activity
if (sourceWorkItem.Type.Name == "Task")
{
newWorkItem.Fields["Microsoft.VSTS.Common.Activity"].Value = sourceWorkItem.Fields["Microsoft.VSTS.Common.Discipline"].Value;
}
// State
//newWorkItem.State = sourceWorkItem.State;
// Reason
//newWorkItem.Reason = sourceWorkItem.Reason;
// build a usable rendition of prior revision history
RevisionCollection revisions = sourceWorkItem.Revisions;
var query = from Revision r in revisions orderby r.Fields["Changed Date"].Value descending select r;
StringBuilder sb = new StringBuilder(String.Format("Migrated from work item {0}<BR />\n", sourceWorkItem.Id));
foreach (Revision revision in query)
{
String history = (String)revision.Fields["History"].Value;
if (!String.IsNullOrEmpty(history))
{
foreach (Field f in revision.Fields)
{
if (!Object.Equals(f.Value, f.OriginalValue))
{
if (f.Name == "History")
{
string notation = string.Empty;
if (revision.Fields["State"].OriginalValue != revision.Fields["State"].Value)
{
notation = String.Format("({0} to {1})", revision.Fields["State"].OriginalValue, revision.Fields["State"].Value);
}
//Console.WriteLine("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation);
sb.Append(String.Format("<STRONG>{0} Edited {3} by {1}</STRONG><BR />\n{2}<BR />\n", revision.Fields["Changed Date"].Value.ToString(), revision.Fields["Changed By"].Value.ToString(), f.Value, notation));
}
}
}
//Console.WriteLine("Revision {0}: ", revision.Fields["Rev"].Value);
//Console.WriteLine(" ChangedDate: " + revision.Fields["ChangedDate"].Value);
//Console.WriteLine(" History: " + sb.ToString());
}
}
newWorkItem.History = sb.ToString();
// Attachments
for (var i = 0; i < sourceWorkItem.AttachedFileCount; i++)
{
CopyAttachment(sourceWorkItem.Attachments[i], newWorkItem);
}
// Validate before save
if (!newWorkItem.IsValid())
{
var reasons = newWorkItem.Validate();
Console.WriteLine(string.Format("Could not validate new work item (old id: {0}).", sourceWorkItem.Id));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
Console.Write("[" + sourceWorkItem.Id + "] " + newWorkItem.Title);
try
{
newWorkItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [saved: {0}]", newWorkItem.Id));
WorkItemIdMap.Add(sourceWorkItem.Id, newWorkItem.Id);
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
private static void CopyAttachment(Attachment attachment, WorkItem newWorkItem)
{
using (var client = new WebClient())
{
client.UseDefaultCredentials = true;
client.DownloadFile(attachment.Uri, attachment.Name);
var newAttachment = new Attachment(attachment.Name, attachment.Comment);
newWorkItem.Attachments.Add(newAttachment);
}
}
private static void LinkRelatedItems(WorkItemStore targetWorkItemStore, WorkItem sourceWorkItem)
{
int newId = WorkItemIdMap[sourceWorkItem.Id];
WorkItem targetItem = targetWorkItemStore.GetWorkItem(newId);
foreach (Link l in sourceWorkItem.Links)
{
if (l is RelatedLink)
{
RelatedLink sl = l as RelatedLink;
switch (sl.ArtifactLinkType.Name)
{
case "Related Workitem":
{
if (WorkItemIdMap.ContainsKey(sl.RelatedWorkItemId))
{
int RelatedWorkItemId = WorkItemIdMap[sl.RelatedWorkItemId];
RelatedLink rl = new RelatedLink(sl.LinkTypeEnd, RelatedWorkItemId);
// !!!!
// this does not work - need to check the existing links to see if one exists already for the linked workitem.
// using contains expects the same object and that's not what I'm doing here!!!!!!
//if (!targetItem.Links.Contains(rl))
// !!!!
var query = from RelatedLink qrl in targetItem.Links where qrl.RelatedWorkItemId == RelatedWorkItemId select qrl;
if (query.Count() == 0)
{
targetItem.Links.Add(rl);
// Validate before save
if (!targetItem.IsValid())
{
var reasons = targetItem.Validate();
Console.WriteLine(string.Format("Could not validate work item (old id: {0}) related link id {1}.", sourceWorkItem.Id, sl.RelatedWorkItemId));
foreach (Field reason in reasons)
{
Console.WriteLine("Field: " + reason.Name + ", Status: " + reason.Status + ", Value: " + reason.Value);
}
}
else
{
try
{
targetItem.Save(SaveFlags.None);
Console.ForegroundColor = ConsoleColor.Cyan;
Console.WriteLine(string.Format(" [Updated: {0}]", targetItem.Id));
Console.ResetColor();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
throw;
}
}
}
}
break;
}
default:
{ break; }
}
}
}
}
private static void EnsureThatStructureExists(string projectName, string structureType, string structurePath)
{
var parts = structurePath.Split('\\');
var css = GetCommonStructureService();
var projectInfo = css.GetProjectFromName(projectName);
var parentNodeUri = GetCssStructure(GetCommonStructureService(), projectInfo.Uri, structureType).Uri;
var currentPath = FormatPath(projectName, structureType == Areas ? "Area" : "Iteration");
foreach (var part in parts)
{
currentPath = FormatPath(currentPath, part);
Console.Write(currentPath);
try
{
var currentNode = css.GetNodeFromPath(currentPath);
parentNodeUri = currentNode.Uri;
Console.ForegroundColor = ConsoleColor.DarkGreen;
Console.WriteLine(" [found]");
}
catch
{
parentNodeUri = css.CreateNode(part, parentNodeUri);
Console.ForegroundColor = ConsoleColor.Green;
Console.WriteLine(" [created]");
}
Console.ResetColor();
}
}
private static string FormatPath(string currentPath, string part)
{
part = part.Substring(part.IndexOf("\\") + 1);
currentPath = string.Format(@"{0}\{1}", currentPath, part);
return currentPath;
}
private static TfsTeamProjectCollection GetProjectCollection(Uri uri)
{
TfsTeamProjectCollection collection;
if (!Collections.TryGetValue(uri, out collection))
{
collection = TfsTeamProjectCollectionFactory.GetTeamProjectCollection(uri);
collection.Connect(ConnectOptions.IncludeServices);
collection.Authenticate();
Collections.Add(uri, collection);
}
return Collections[uri];
}
private static WorkItemStore GetSourceWorkItemStore()
{
var collection = GetProjectCollection(SourceCollectionUri);
return collection.GetService<WorkItemStore>();
}
private static WorkItemStore GetTargetWorkItemStore()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<WorkItemStore>();
}
public static NodeInfo GetCssStructure(ICommonStructureService css, String projectUri, String structureType)
{
return css.ListStructures(projectUri).FirstOrDefault(node => node.StructureType == structureType);
}
private static ICommonStructureService GetCommonStructureService()
{
var collection = GetProjectCollection(TargetCollectionUri);
return collection.GetService<ICommonStructureService>();
}
}
}
