I have to rework FetchUtil.js so it can be used in CRM 2011 UR 12. I'm not very good at JavaScript, so I need some help.
This is the original code:
var sFetchResult = xmlhttp.responseXML.selectSingleNode("//a:Entities").xml;
var resultDoc = new ActiveXObject("Microsoft.XMLDOM");
resultDoc.async = false;
resultDoc.loadXML(sFetchResult);
It no longer works even in IE, because of .selectSingleNode("//a:Entities").xml.
I tried it like this, but there is no xml property there:
sFetchResult = xmlhttp.responseXML.getElementsByTagName('a:Entities')[0].xml;
var resultDoc = new ActiveXObject("Microsoft.XMLDOM");
resultDoc.async = false;
resultDoc.loadXML(sFetchResult);
Help me to remake this for IE and Chrome.
Thanks a lot!
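The MSXML-only pieces here are selectSingleNode, the .xml property, and ActiveXObject. For reference, a minimal cross-browser sketch (getEntitiesXml is just an illustrative name, untested against any particular rollup) that matches the node by local name and only falls back to the IE-specific APIs when the standard ones are missing:
function getEntitiesXml(responseXml) {
    // standards DOM (Chrome, Firefox, newer IE): match by local name so the "a:" prefix doesn't matter
    if (responseXml.getElementsByTagNameNS) {
        var node = responseXml.getElementsByTagNameNS("*", "Entities")[0];
        return node ? new XMLSerializer().serializeToString(node) : null;
    }
    // MSXML fallback (older IE): the original selectSingleNode/.xml route
    var msNode = responseXml.selectSingleNode("//a:Entities");
    return msNode ? msNode.xml : null;
}
// Re-parsing the string: DOMParser where it exists, ActiveXObject otherwise.
var sFetchResult = getEntitiesXml(xmlhttp.responseXML);
var resultDoc;
if (typeof DOMParser !== "undefined") {
    resultDoc = new DOMParser().parseFromString(sFetchResult, "text/xml");
} else {
    resultDoc = new ActiveXObject("Microsoft.XMLDOM");
    resultDoc.async = false;
    resultDoc.loadXML(sFetchResult);
}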
Here is my calling module (included as a web resource).
(function (module, undefined) {
module.buildFetchRequest = function (fetch) {
/// <summary>
/// builds a properly formatted FetchXML request
/// based on Paul Way's blog post "Execute Fetch from JavaScript in CRM 2011"
/// http://blog.customereffective.com/blog/2011/05/execute-fetch-from-javascript-in-crm-2011.html
/// </summary>
var request = "<s:Envelope xmlns:s=\"http://schemas.xmlsoap.org/soap/envelope/\">";
request += "<s:Body>";
request += '<Execute xmlns="http://schemas.microsoft.com/xrm/2011/Contracts/Services">' +
'<request i:type="b:RetrieveMultipleRequest" ' +
' xmlns:b="http://schemas.microsoft.com/xrm/2011/Contracts" ' +
' xmlns:i="http://www.w3.org/2001/XMLSchema-instance">' +
'<b:Parameters xmlns:c="http://schemas.datacontract.org/2004/07/System.Collections.Generic">' +
'<b:KeyValuePairOfstringanyType>' +
'<c:key>Query</c:key>' +
'<c:value i:type="b:FetchExpression">' +
'<b:Query>';
request += CrmEncodeDecode.CrmXmlEncode(fetch);
request += '</b:Query>' +
'</c:value>' +
'</b:KeyValuePairOfstringanyType>' +
'</b:Parameters>' +
'<b:RequestId i:nil="true"/>' +
'<b:RequestName>RetrieveMultiple</b:RequestName>' +
'</request>' +
'</Execute>';
request += '</s:Body></s:Envelope>';
return request;
};
module.sendFetchQuery = function (fetchRequest, doneCallback, failCallback) {
//path to CRM root
var server = window.location.protocol + "//" + window.location.host;
//full path to CRM organization service - you may need to modify this depending on your particular situation
var path = server + "/XRMServices/2011/Organization.svc/web";
$.ajax({
type: "POST",
dataType: 'xml',
async: false,
contentType: "text/xml; charset=utf-8",
processData: false,
url: path,
data: fetchRequest,
beforeSend: function (xhr) {
xhr.setRequestHeader(
"SOAPAction",
"http://schemas.microsoft.com/xrm/2011/Contracts/Services/IOrganizationService/Execute"
); //without the SOAPAction header, CRM will return a 500 error
}
}).done(doneCallback)
.fail(failCallback);
};
}(window.xFetch = window.xFetch || {}));
Usage
(The parser requires jQuery. I do most of my fetch calls in web-resourced HTML pages, so this isn't a problem.) This works in IE and Chrome; I haven't checked Firefox, but I can't see why it wouldn't work.
var fetchXml =
xFetch.buildFetchRequest("<fetch version='1.0' output-format='xml-platform' mapping='logical' distinct='false'>" +
" <entity name='ENTITYNAME'>" +
" <attribute name='ATTRIBUTE' />" +
" </entity>" +
"</fetch>");
var entityList = new Array();
xFetch.sendFetchQuery(fetchXml,
function (fetchResponse) {
// chrome doesn't like the namespaces because of
// selectSingleNode implementations (which make sense btw)
// I'll never understand why Microsoft have to pepper their xml
// with namespace dross
$(fetchResponse).find("a\\:Entity, Entity").each(function () {
var entityData = {};
$(this).find("a\\:KeyValuePairOfstringanyType, KeyValuePairOfstringanyType").each(function () {
var xmlElement = $(this);
var key = xmlElement.find("b\\:key, key").text();
var value = xmlElement.find("b\\:value, value").text();
entityData[key] = value;
});
//inner loop
$(this).find("a\\:KeyValuePairOfstringstring, KeyValuePairOfstringstring").each(function () {
var xmlElement = $(this);
var key = xmlElement.find("b\\:key, key").text();
var value = xmlElement.find("b\\:value, value").text();
entityData[key] = value;
});
entityList.push(entityData);
});
}, function (jqXhr, textStatus, errorThrown) {
// if unsuccessful, generate an error alert message
});
for (var i = 0; i < entityList.length; i++) {
if (entityList[i].ATTRIBUTE === "Yes" ){
// DO WHATEVER
}
}
I only needed attributes of type KeyValuePairOfstringstring and KeyValuePairOfstringanyType, but you could parse out any attribute with the right combination of selectors.
Each item in the retrieved list ends up as a plain object keyed by attribute name.
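For example, lookup (EntityReference) attributes keep their display name in a nested Name element rather than in the pair's text, so a couple of extra lines inside the same KeyValuePairOfstringanyType loop could pick it up (the element name and prefix here are assumed from the 2011 SOAP response, not verified):
// illustrative only: read the display name of a lookup value, if one is present
var lookupName = xmlElement.find("a\\:Name, Name").text();
if (lookupName) {
    entityData[key + "_name"] = lookupName; // the "_name" suffix is just a convention here
}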
I was facing a similar issue and resolved it with the workaround below.
var sFetchResult = xmlhttp.response;
var tempresultDoc = new ActiveXObject("Microsoft.XMLDOM");
tempresultDoc.async = false;
tempresultDoc.loadXML(sFetchResult);
// At this point we have the full response XML. Get the single node from it with the code below.
var resultDoc = new ActiveXObject("Microsoft.XMLDOM");
resultDoc.async = false;
resultDoc.loadXML(tempresultDoc.childNodes[0].selectSingleNode("//a:Entities").xml);
If you're looking for a version that doesn't require jQuery, and one that parses the results, check this out. It not only wraps the FetchXML but also parses the response XML into JavaScript objects for easy retrieval.
Related
On load of my page I execute this function
function getConnection() {
$.ajax({
type: "GET",
url: "../webservice/anonymous_PS.asmx/Get",
data: { "PSname": "LISTE_CONNEXTION" },
async : false ,
success: function (response) {
var data = response.getElementsByTagName("NewDataSet")[0]
for (let i = 0; i < data.children.length; i++) {
var c1Nb = $(data.children[i]).find('c1').text()
var c2Nb = $(data.children[i]).find('c2').text()
var c1 = document.getElementById("cs" + c1Nb)
var c2 = document.getElementById("cs" + c2Nb)
var line = $("#l_" + c1Nb + "_" + c2Nb)
}
}
})
}
But when I do that I get this error in Firefox:
XML Parsing Error: not well-formed
Location:
Line Number 1, Column 131:
and in Chrome I sometimes get this error:
devtools was disconnected from the page
How can I resolve my issue ?
Try parsing your response first: use $.parseXML(response) to parse it as XML, or $.parseHTML(response) to parse it as HTML.
Once the parsing is done, getElementsByTagName("NewDataSet")[0] will work and you will not get the error.
The final code will look something like:
var parsedResponse = $.parseXML(response);
var data = parsedResponse.getElementsByTagName("NewDataSet")[0];
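Wired back into the original request, that might look like this (a sketch; the URL, parameter, and element names are taken from the question):
$.ajax({
    type: "GET",
    url: "../webservice/anonymous_PS.asmx/Get",
    data: { "PSname": "LISTE_CONNEXTION" },
    success: function (response) {
        // some .asmx endpoints hand back a string, so only parse when that is the case
        var xml = (typeof response === "string") ? $.parseXML(response) : response;
        var data = xml.getElementsByTagName("NewDataSet")[0];
        for (let i = 0; i < data.children.length; i++) {
            var c1Nb = $(data.children[i]).find('c1').text();
            var c2Nb = $(data.children[i]).find('c2').text();
            // ...same element lookups as in the original function
        }
    }
});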
I'm using GitHub Gists for a web playground I'm making as a side project. I load two JSON files into the editor: one handles all the libraries (jQuery, Bootstrap, etc.) and the other the user's settings (font size, version, etc.).
So anyway, I have this JSON named settings:
var settings = gistdata.data.files["settings.json"].content
var jsonSets = JSON.parse(settings)
I parse it and attempt to grab a value from the JSON and set it as the value of an input textbox.
Now console.log(jsonSets.siteTitle) works perfectly fine
but when I try to change the input dynamically...
$("[data-action=sitetitle]").val(jsonSets.siteTitle).trigger("change")
The problem is it's not actually applying the value!
The only way I've been able to successfully apply the value is...
setTimeout(function() {
$("[data-action=sitetitle]").val(jsonSets.siteTitle).trigger("change")
}, 5000)
Which is ridiculously slow.
Does anyone know why it's not applying the value?
In addition, how can I solve this problem? Here is the full code:
var hash = window.location.hash.substring(1)
if (window.location.hash) {
function loadgist(gistid) {
$.ajax({
url: "https://api.github.com/gists/" + gistid,
type: "GET",
dataType: "jsonp"
}).success(function(gistdata) {
var libraries = gistdata.data.files["libraries.json"].content
var settings = gistdata.data.files["settings.json"].content
var jsonLibs = JSON.parse(libraries)
var jsonSets = JSON.parse(settings)
// Return libraries from json
$.each(jsonLibs, function(name, value) {
$(".ldd-submenu #" + name).prop("checked", value)
})
// Return font settings from json
var siteTitle = jsonSets.siteTitle
var WeaveVersion = jsonSets.version
var editorFontSize = jsonSets.editorFontSize
var WeaveDesc = jsonSets.description
var WeaveAuthor = jsonSets.author
$("[data-action=sitetitle]").val(siteTitle).trigger("change")
$("[data-value=version]").val(WeaveVersion).trigger("change")
$("[data-editor=fontSize]").val(editorFontSize).trigger("change")
$("[data-action=sitedesc]").val(WeaveDesc).trigger("change")
$("[data-action=siteauthor]").val(WeaveAuthor).trigger("change")
}).error(function(e) {
// ajax error
console.warn("Error: Could not load weave!", e)
})
}
loadgist(hash)
} else {
// No hash found
}
My problem was actually related to localStorage.
I cleared it with localStorage.clear(), ran the Ajax function afterwards, and that solved the problem.
var hash = window.location.hash.substring(1)
if (window.location.hash) {
localStorage.clear()
function loadgist(gistid) {
$.ajax({
url: "https://api.github.com/gists/" + gistid,
type: "GET",
dataType: "jsonp",
jsonp: "callback"
}).success(function(gistdata) {
var htmlVal = gistdata.data.files["index.html"].content
var cssVal = gistdata.data.files["index.css"].content
var jsVal = gistdata.data.files["index.js"].content
var mdVal = gistdata.data.files["README.md"].content
var settings = gistdata.data.files["settings.json"].content
var libraries = gistdata.data.files["libraries.json"].content
var jsonSets = JSON.parse(settings)
var jsonLibs = JSON.parse(libraries)
// Return font settings from json
var siteTitle = jsonSets.siteTitle
var WeaveVersion = jsonSets.version
var editorFontSize = jsonSets.editorFontSize
var WeaveDesc = jsonSets.description
var WeaveAuthor = jsonSets.author
$("[data-action=sitetitle]").val(siteTitle)
$("[data-value=version]").val(WeaveVersion)
$("[data-editor=fontSize]").val(editorFontSize)
$("[data-action=sitedesc]").val(WeaveDesc)
$("[data-action=siteauthor]").val(WeaveAuthor)
storeValues()
// Return settings from the json
$(".metaboxes input.heading").trigger("keyup")
// Return libraries from json
$.each(jsonLibs, function(name, value) {
$(".ldd-submenu #" + name).prop("checked", value).trigger("keyup")
})
// Set checked libraries into preview
$("#jquery").trigger("keyup")
// Return the editor's values
mdEditor.setValue(mdVal)
htmlEditor.setValue(htmlVal)
cssEditor.setValue(cssVal)
jsEditor.setValue(jsVal)
}).error(function(e) {
// ajax error
console.warn("Error: Could not load weave!", e)
})
}
loadgist(hash)
} else {
// No hash found
}
So, this is the code I have. console.log gives me the right value, but the function doesn't return the value, even when the return is inside the timeout. I must be doing something wrong.
function countyfinder(address) {
    var rr = $.getJSON('https://maps.googleapis.com/maps/api/geocode/json?address=' + address.replace(" ", "%20")).done(function(data) {
        var county = data.results[0].address_components[3].short_name;
        //return county;//data is the JSON string
    });
    return rr;
};
function calculatetax(address, price) {
    var j = countyfinder(address);
    setTimeout(function() {
        var k = j["responseJSON"]['results'][0]['address_components'][3]['short_name'];
        console.log(k);//return k won't work in here either
    }, 1000);
    return k;
};
This is what I ended up with:
var jq = document.createElement('script');
jq.src = "//ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js";
document.getElementsByTagName('head')[0].appendChild(jq);
function getCounty(address) {
var county;
var baseApiUrl = "https://maps.googleapis.com/maps/api/geocode/json";
var query = "?address=" + encodeURIComponent(address);
var queryUrl = baseApiUrl + query;
$.ajax({
url: queryUrl,
async: false,
dataType: 'json',
success: function(data) {
county = gmapsExtractByType(data, "administrative_area_level_2 political");
}
});
return county.long_name;
}
function gmapsExtractByType(json, type) {
return json.results[0].address_components.filter(function(element) {
return element.types.join(" ") === type;
})[0];
}
console.log( getCounty("100 wacko lane ohio") );
I had to use a synchronous request by changing some settings in the Ajax call. The drawback is that the browser will be locked up until the response comes back, which can be bad on a slow connection or with an unreliable server. With Google, most of the time, I don't think that will happen.
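If the blocking is a concern, an asynchronous variant is also possible: pass the result to a callback instead of returning it (a sketch reusing gmapsExtractByType from above; getCountyAsync is just an illustrative name):
function getCountyAsync(address, done) {
    var queryUrl = "https://maps.googleapis.com/maps/api/geocode/json" +
                   "?address=" + encodeURIComponent(address);
    $.getJSON(queryUrl).done(function (data) {
        var county = gmapsExtractByType(data, "administrative_area_level_2 political");
        done(county ? county.long_name : null);   // hand the result to the caller
    }).fail(function () {
        done(null);                               // or surface the error, as needed
    });
}
// usage
getCountyAsync("100 wacko lane ohio", function (countyName) {
    console.log(countyName);
});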
I have an issue with JavaScript and haven't found a solution for it yet.
Maybe one of you could give me a hint.
I have created a function which is called by a button click in SharePoint 2010.
The function should collect all selected/checked documents from a document library and write them into a separate box I created. Getting all selected documents works fine, but in SharePoint I have to load each element individually for its details with an asynchronous request. Here comes my problem:
If I select more than one document, the variable "item" gets overwritten by the "for" loop. In my asynchronous request's success function I then use the variable "item" again to get its details, so I always get the data of the last item in my selection.
Is there a way to avoid this?
Thanks for any help.
Here is my code:
function ApproveDocuments() {
var ClientContext = SP.ClientContext.get_current();
var LibraryID = SP.ListOperation.Selection.getSelectedList();
var Library = ClientContext.get_web().get_lists().getById(LibraryID); //Gets the current Library
var SelectedDocuments = SP.ListOperation.Selection.getSelectedItems(ClientContext);
for (var currentItem in SelectedDocuments) {
var item = Library.getItemById(SelectedDocuments[currentItem].id);
ClientContext.load(item, 'FileLeafRef');
ClientContext.executeQueryAsync(Function.createDelegate(this, function () {
var newElementHtml = '<div style="float:left;padding:3px;width:50px;"></div>';
newElementHtml += '<div style="float:left;padding:3px;">' + item.get_item('FileLeafRef') + '</div>';
newElementHtml += '<div style="clear:both;"></div>';
jQuery("#grol1855InfoDivData").append(newElementHtml);
}), Function.createDelegate(this, this.onLoadItemFailure));
}}
I would refactor this a bit so that you don't make an HTTP request on every iteration of the loop, which should also solve your overwrite problem. I also declared the variables outside the loop and point them at the new values on each iteration.
function ApproveDocuments() {
var ClientContext = SP.ClientContext.get_current();
var LibraryID = SP.ListOperation.Selection.getSelectedList();
var Library = ClientContext.get_web().get_lists().getById(LibraryID); //Gets the current Library
var SelectedDocuments = SP.ListOperation.Selection.getSelectedItems(ClientContext);
var fileItems = [], item;
for (var currentItem in SelectedDocuments) {
item = Library.getItemById(SelectedDocuments[currentItem].id);
fileItems.push(item);
ClientContext.load(item, 'FileLeafRef');
}
ClientContext.executeQueryAsync(Function.createDelegate(this, function() {
var newElementHtml;
for (var i = 0; i < fileItems.length; i++) {
newElementHtml = '<div style="float:left;padding:3px;width:50px;"></div>';
newElementHtml += '<div style="float:left;padding:3px;">' + fileItems[i].get_item('FileLeafRef') + '</div>';
newElementHtml += '<div style="clear:both;"></div>';
jQuery("#grol1855InfoDivData").append(newElementHtml);
}
}), Function.createDelegate(this, this.onLoadItemFailure));
}
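If you really do need one request per item, the usual fix for the overwrite is to capture each item in its own function scope, along these lines (a sketch only; loadAndRender and onLoadItemFailure are illustrative names):
function loadAndRender(clientContext, item) {
    clientContext.load(item, 'FileLeafRef');
    clientContext.executeQueryAsync(function () {
        // 'item' is a parameter of this function, so each callback sees its own document
        jQuery("#grol1855InfoDivData").append(
            '<div style="float:left;padding:3px;">' + item.get_item('FileLeafRef') + '</div>' +
            '<div style="clear:both;"></div>');
    }, onLoadItemFailure);
}
for (var currentItem in SelectedDocuments) {
    loadAndRender(ClientContext, Library.getItemById(SelectedDocuments[currentItem].id));
}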
I would also really advise against writing new solutions that call any of the SOAP services or anything in _vti_bin for that matter; it's just a matter of time before those go away and your stuff won't work.
I have also tried ClientContext.executeQueryAsync in a loop but have never gotten it to work for similar reasons. I've worked around this before by using Ajax to call the SharePoint lists.asmx web service. For example:
var targetUrl = "/_vti_bin/lists.asmx";
var listName = "Shared Documents";
for (var currentItem in SelectedDocuments) {
var currentItemId = SelectedDocuments[currentItem].id;
var soapEnvArray = [];
soapEnvArray.push("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
soapEnvArray.push("<soap:Envelope ");
soapEnvArray.push("xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">");
soapEnvArray.push("<soap:Body><GetListItems xmlns=\"http://schemas.microsoft.com/sharepoint/soap/\"><listName>" + listName + "</listName>");
soapEnvArray.push("<viewName></viewName>");
soapEnvArray.push("<query>");
soapEnvArray.push("<Where>");
soapEnvArray.push("<Eq>");
soapEnvArray.push("<FieldRef Name=\"ID\"></FieldRef>");
soapEnvArray.push("<Value Type=\"Counter\">" + currentItemId + "</Value>");
soapEnvArray.push("</Eq>");
soapEnvArray.push("</Where>");
soapEnvArray.push("</query>");
soapEnvArray.push("<viewFields>");
soapEnvArray.push("</viewFields>");
soapEnvArray.push("<rowLimit>2000</rowLimit><queryOptions><QueryOptions xmlns=\"\">");
soapEnvArray.push("<IncludeMandatoryColumns>FALSE</IncludeMandatoryColumns>");
soapEnvArray.push("<ViewAttributes Scope = \"RecursiveAll\"/>");
soapEnvArray.push("</QueryOptions></queryOptions>");
soapEnvArray.push("</GetListItems></soap:Body></soap:Envelope>");
var soapEnv = soapEnvArray.join("");
$.ajax({
cache: false,
url: targetUrl,
type: "POST",
dataType: "xml",
data: soapEnv,
contentType: "text/xml; charset=utf-8",
beforeSend: function (xhr) {
xhr.setRequestHeader("SOAPAction", "http://schemas.microsoft.com/sharepoint/soap/GetListItems");
},
complete: function (msg) {
if (msg.status == 200) {
var totalTaskCount = $(msg.responseXML).find("z\\:row, row").length;
$(msg.responseXML).find("z\\:row, row").each(function () {
console.log(currentItemId + ": " + $(this).attr("ows_Title"));
});
} else {
//Failure
var errorCode = $(msg.responseXML).find("errorcode").text();
var errorString = $(msg.responseXML).find("errorstring").text();
if (errorString.length === 0) {
errorString = $(msg.responseXML).find("faultstring").text();
}
errorString = errorString.replace(/(\r\n|\n|\r)/gm, "");
}
}
});
}
To make SharePoint Ajax requests easier, I would recommend picking up a copy of SharePoint CAML Query Helper for 2007, 2010, and 2013. Also, although I have never used it, you may want to consider trying the SPServices jQuery Library for SharePoint to simplify the task.
I'm currently working on incorporating an authorization feature for Twitter, following the approach described here: https://dev.twitter.com/docs/auth/implementing-sign-twitter. I'm using Ajax to send my POST request, but I keep running into a '401: Unauthorized' error. My code is below:
function getTweets() {
var time = generateTimestamp();
var nonce = generateNonce();
var signature = generateSignature(time, nonce);
var headers = {
"Authorization": 'OAuth oauth_callback="http%3A%2F%2Fwww.google.com%2F", oauth_consumer_key="eEeAAz9fakedtAOlIUhPgQ", oauth_nonce="bbc34b2ca6faabogus6dfc025907fa334", oauth_signature="' + signature + '", oauth_signature_method="HMAC-SHA1", oauth_timestamp="' + time + '", oauth_version="1.0"'
};
$.ajax({
type: "POST",
url: "https://api.twitter.com/oauth/request_token",
dataType: "text",
headers: headers,
success: function(data) {
alert("Success!");
console.log(data);
},
error: function(jq) {
alert("Request Failed.");
console.log(jq.statusText);
}
});
}
function generateTimestamp() {
var currentTime = new Date;
currentTime = Math.floor(currentTime.getTime() / 1000);
return currentTime;
}
function generateNonce() {
var code = "";
for (var i = 0; i < 20; i++) {
code += Math.floor(Math.random() * 9).toString();
}
return code;
}
function generateSignature(timestamp, nonce) {
var http_method = "POST";
var base_url = "https://api.twitter.com/oauth/request_token";
var consumer_key = "eEeAAz9hUKtdjunkeIUhPgQ";
var consumer_secret = "c7wHxnjubxVDcc5hYFqnotactuallymysecretWs2XazUFde0lPRBtBQ";
var signature_method = "HMAC-SHA1";
var token = "609493744-kNPzLKSI4Hg9NWQnopeFPb91eXFUutFm1nZ2hDk2";
var token_secret = "15WOJS9Ji1AXsKRkyAZrxKdsalted5Gj5ZyEAb9aVrJxI";
var version = "1.0";
var parameter_string = "oauth_callback=" + encodeURIComponent(base_url) + "&oauth_consumer_key=" + consumer_key + "&oauth_nonce=" + nonce + "&oauth_consumer_key=" + consumer_key + "&oauth_signature_method=" + signature_method + "&oauth_timestamp=" + timestamp +"&oauth_version=" + version;
var base_string = http_method + "&" + encodeURIComponent(base_url) + "&" + encodeURIComponent(parameter_string);
var signing_key = encodeURIComponent(consumer_secret) + "&";
var signature = encodeURIComponent(window.btoa(CryptoJS.HmacSHA1(base_string, signing_key)));
alert(signature);
return signature;
}
Feel free to post below if you need any other information that would help clarify this error. Thanks.
I created a node.js library to mess around with the Twitter OAuth dance and API. Code is here, tweeter.js
You're welcome to walk through the logic for creating the header and signature (starting at line 348 )
One thing I don't see in the code you've posted, and which will make a huge difference, is that the signature must be generated over the original header parameters, and then the header must be rebuilt with the generated signature included. It's a huge pain and it took me a while to figure it out.
Although the code I wrote is geared toward node.js, you should be able to reuse a lot of the logic to meet your needs.
EDIT
I found a site called hueniverse that documents OAuth very well. In fact, there is a utility here to build your own headers so you can validate your logic (select the 'Create your own' radio button).
EDIT 2
To better explain including the oauth_signature value in the header, suppose you have all of the data up to this point:
var headerObj = {
    oauth_consumer_key: "123456789",
    oauth_token: "11111",
    oauth_nonce: "asdfghjkl%3B",
    oauth_timestamp: "1341852000",
    oauth_signature_method: "HMAC-SHA1",
    oauth_version: "1.0"
};
You create the HMAC-SHA1 signature and receive: "jBpoONisOt5kFYOrQ5fHCSZBGkI%3D"
You would then add that return value to headerObj, giving you:
headerObj = {
    oauth_consumer_key: "123456789",
    oauth_token: "11111",
    oauth_nonce: "asdfghjkl%3B",
    oauth_timestamp: "1341852000",
    oauth_signature_method: "HMAC-SHA1",
    oauth_version: "1.0",
    oauth_signature: "jBpoONisOt5kFYOrQ5fHCSZBGkI%3D"
};
And this modified version of headerObj is what you build your HTTP headers from.
GET / HTTP/1.1
Host: api.twitter.com:443
Authorization: OAuth realm="https://api.twitter.com/",
oauth_consumer_key="123456789",
oauth_token="11111",
oauth_nonce="asdfghjkl%3B",
oauth_timestamp="1341852000",
oauth_signature_method="HMAC-SHA1",
oauth_version="1.0",
oauth_signature="jBpoONisOt5kFYOrQ5fHCSZBGkI%3D"
NOTE: I didn't verify the host/realm/port, so these are probably wrong. Check the API for those.
The reason this is done is that on Twitter's side (this is an OAuth implementation detail), the oauth_signature value is removed and the rest of the header is hashed and its return value is compared to the value sent in oauth_signature. It's sort of like a wax seal on an envelope... if the hash of the rest of the header doesn't match the hash value you sent in oauth_signature, Twitter knows not to trust the sender or the contents.
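Concretely, the string that gets signed has to include every parameter from the header except oauth_signature itself (plus any query or body parameters), percent-encoded and sorted by key. A sketch of just that step (buildParameterString is an illustrative name):
// Build the OAuth parameter string from every header parameter except the
// signature itself, percent-encoded and sorted by key.
function buildParameterString(headerObj) {
    return Object.keys(headerObj)
        .filter(function (key) { return key !== "oauth_signature"; })
        .sort()
        .map(function (key) {
            return encodeURIComponent(key) + "=" + encodeURIComponent(headerObj[key]);
        })
        .join("&");
}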
EDIT 2.5
I'm moving this from the comment to the answer.
If you check out this line in tweeter.js, you'll see the logic.
var signature = self.oauthSignature(method, path, headerObj, query);
headerObj.oauth_signature = qs.escape(signature);
// concat the header object into a csv string
var header = 'OAuth realm="Twitter API",';
var oauthParts = [];
for (var h in headerObj) {
oauthParts.push(h + '="'+headerObj[h]+'"');
}
header+= oauthParts.join(',');
//...
return header;
This bit of code does what I explained in EDIT 2: it converts the header object into key="value" strings stored in oauthParts[], then joins the elements of that array into a single comma-separated string which begins with OAuth realm="Twitter API",