Problem
When upgrading attachments from AX2012 to D365FO, only URLs, notes, and files saved in the AX2012 database can be transferred to Azure Blob Storage (on-cloud).
Therefore, all attachment files in AX2012 would have to be moved into the database first in order to be migrated to D365FO successfully.
What if there are millions of them? Technically speaking, saving them on SharePoint (on-cloud) might be a good option; however, such attachment links will unfortunately be deleted, too.
You can find more details on document management in D365FO at https://docs.microsoft.com/en-us/dynamics365/fin-ops-core/fin-ops/organization-administration/configure-document-management
Technical details
During the AX2012-to-D365FO database upgrade process, among other standard classes ('scripts'), ReleaseUpdateDB72_Docu is triggered. It deletes the records in DocuValue and related tables for all files not saved in the database, including even those referenced on a local SharePoint server.
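To gauge the scope before the upgrade, you can count the records the standard script would otherwise remove. Below is a minimal sketch of an AX2012 job; the job name is hypothetical, the hard-coded table and field names should be verified against your schema, and the filter is a simplified version of the one used by the extension shown further down.

// Hypothetical scoping job (AX2012): counts DocuValue rows whose content is not
// stored in the database and which the standard upgrade script would delete.
// The WHERE clause is a simplified version of the filter used by the extension below;
// adjust the conditions to match your environment.
static void myCountAttachmentsToRetarget(Args _args)
{
    Connection connection;
    Statement  statement;
    ResultSet  resultSet;
    str        sqlQuery;

    connection = new Connection();
    statement  = connection.createStatement();

    sqlQuery = @"SELECT COUNT(*)
                 FROM DOCUVALUE dv
                 JOIN DOCUREF  dr ON dr.VALUERECID = dv.RECID
                 JOIN DOCUTYPE dt ON dr.TYPEID = dt.TYPEID
                                 AND dr.ACTUALCOMPANYID = dt.DATAAREAID
                 WHERE dv.FILE IS NULL
                   AND dv.PATH = ''
                   AND dt.FILEPLACE = 0";

    new SqlStatementExecutePermission(sqlQuery).assert();
    resultSet = statement.executeQuery(sqlQuery);

    if (resultSet.next())
    {
        info(strFmt("Attachments to re-target: %1", resultSet.getInt(1)));
    }

    CodeAccessPermission::revertAssert();
    connection.finalize();
}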
Solution
As a solution, we can first move all external files referenced in AX2012 attachments to an on-cloud SharePoint site.
Then an extension to the aforementioned class is triggered during the standard DB upgrade procedure, so that it keeps the existing links and updates them according to the new SharePoint folder structure.
For example, we can agree that the existing local folder structure will be reproduced on the SharePoint site.
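To make this concrete, here is the mapping the code below produces for a hypothetical attachment (the company, file name, and file type are made up for illustration; the host and site come from the constants in the extension):

FileName = 'Invoice_0042', FileType = 'pdf', company = SPND
DocuRef.TypeId              -> Legacy_SPND
DocuType.FolderPath         -> Legacy/SPND
DocuValue.Path              -> https://myCompany.sharepoint.com/sites/D365FOFileShare/Legacy/SPND/Invoice_0042.pdf
DocuValue.AccessInformation -> https://myCompany.sharepoint.com/sites/D365FOFileShare/_api/Web/GetFileByServerRelativePath(decodedurl='/sites/D365FOFileShare/Legacy/SPND/Invoice_0042.pdf')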
The following code must be adapted to your landscape and tested first on a small set of files in a development environment.
Please use it at your own risk.
[ExtensionOf(classStr(ReleaseUpdateDB72_Docu))]
final class myReleaseUpdateDB72_Docu_Extension
{
    public const str myLegacy = 'Legacy';
    public const int myActionClassId = 118; // DocuActionURLClassId
    public const str myName = 'Legacy attachments for ';
    public const str myHost = 'myCompany.sharepoint.com';
    public const str mySite = '/sites/D365FOFileShare';
    public const str myUpgradeModifiedUser = 'myAxDocUpgradeUser'; // fake user for marking records
    public const int myMaxRowsToUpdatePerStatement = 10000;
    public const str myPart1 = "https://myCompany.sharepoint.com/sites/D365FOFileShare/Legacy/";
    public const str myPart2 = "https://myCompany.sharepoint.com/sites/D365FOFileShare/_api/Web/GetFileByServerRelativePath(decodedurl=''/sites/D365FOFileShare/Legacy/";
    public const str myPart3 = "'')";
    public const str myPart4 = "/";
    public const str myPart5 = ".";
    public const str myPart6 = "''";

    /// <summary>
    /// Sets a special value in the File field to prevent the standard script from dropping these records.
    /// To be run BEFORE the script.
    /// </summary>
    private void myPreUpdateDocuValue_CorrectFileLocations()
    {
        SysDictTable docuValueTable = new SysDictTable(tableNum(DocuValue));
        SysDictTable docuRefTable   = new SysDictTable(tableNum(DocuRef));
        SysDictTable docuTypeTable  = new SysDictTable(tableNum(DocuType));
        str          sqlQuery;
        Connection   connection = new Connection();

        try
        {
            int impactedRows;

            // First update all DocuValue records with null files and an empty path:
            // these files sit in network shared folders and must be re-targeted to the SharePoint server.
            // Setting FILE to a dummy value ensures the standard next() call does not delete them.
            do
            {
                sqlQuery = strFmt(@"
                    UPDATE TOP (%8) docValue
                    SET docValue.%6 = CAST('%1' AS VARBINARY)
                    FROM %2 docValue
                    JOIN %3 docRef ON docRef.%7 = docValue.%5
                    JOIN %11 docType ON (docRef.%12 = docType.%13 AND docRef.%14 = docType.%15)
                        AND docValue.%6 IS NULL
                        AND docValue.%9 = ''
                        AND docValue.%10 = 0
                        AND docType.%16 = 0",
                    myUpgradeModifiedUser,                                                            // %1  - upgrade modified user
                    docuValueTable.name(DbBackend::Sql),                                              // %2  - DocuValue
                    docuRefTable.name(DbBackend::Sql),                                                // %3  - DocuRef
                    docuValueTable.fieldName(fieldNum(DocuValue, ModifiedBy), DbBackend::Sql),        // %4  - DocuValue.ModifiedBy
                    docuValueTable.fieldName(fieldNum(DocuValue, RecId), DbBackend::Sql),             // %5  - DocuValue.RecId
                    docuValueTable.fieldName(fieldNum(DocuValue, File), DbBackend::Sql),              // %6  - DocuValue.File
                    docuRefTable.fieldName(fieldNum(DocuRef, ValueRecId), DbBackend::Sql),            // %7  - DocuRef.ValueRecId
                    myMaxRowsToUpdatePerStatement,                                                    // %8  - max rows to update per statement
                    docuValueTable.fieldName(fieldNum(DocuValue, Path), DbBackend::Sql),              // %9  - DocuValue.Path
                    docuValueTable.fieldName(fieldNum(DocuValue, Type), DbBackend::Sql),              // %10 - DocuValue.Type
                    docuTypeTable.name(DbBackend::Sql),                                               // %11 - DocuType
                    docuRefTable.fieldName(fieldNum(DocuRef, TypeId), DbBackend::Sql),                // %12 - DocuRef.TypeId
                    docuTypeTable.fieldName(fieldNum(DocuType, TypeId), DbBackend::Sql),              // %13 - DocuType.TypeId
                    docuRefTable.fieldName(fieldNum(DocuRef, ActualCompanyId), DbBackend::Sql),       // %14 - DocuRef.ActualCompanyId
                    docuTypeTable.fieldName(fieldNum(DocuType, DataAreaId), DbBackend::Sql),          // %15 - DocuType.DataAreaId
                    docuTypeTable.fieldName(fieldNum(DocuType, FilePlace), DbBackend::Sql)            // %16 - DocuType.FilePlace
                    );

                impactedRows = this.myExecuteSQL(sqlQuery, connection);
            }
            while (impactedRows == myMaxRowsToUpdatePerStatement);
        }
        finally
        {
            connection.finalize();
        }
    }

    /// <summary>
    /// Nulls the FILE field back and updates the other fields to keep the SharePoint links correctly.
    /// To be run AFTER the script.
    /// </summary>
    private void myPostUpdateDocuValue_CorrectFileLocations()
    {
        SysDictTable docuValueTable = new SysDictTable(tableNum(DocuValue));
        SysDictTable docuRefTable   = new SysDictTable(tableNum(DocuRef));
        SysDictTable docuTypeTable  = new SysDictTable(tableNum(DocuType));
        str          sqlQuery;
        Connection   connection = new Connection();

        try
        {
            int impactedRows;

            // First update all premarked DocuRef records with the new SharePoint document type
            do
            {
                sqlQuery = strFmt(@"
                    UPDATE TOP (%8) docRef
                    SET docRef.%17 = '%21' + '_' + docRef.%18
                    FROM %3 docRef
                    JOIN %2 docValue ON docRef.%7 = docValue.%5
                        AND docValue.%6 = CAST('%1' AS VARBINARY)
                        AND docRef.%17 <> '%21' + '_' + docRef.%18",
                    myUpgradeModifiedUser,                                                            // %1  - upgrade modified user
                    docuValueTable.name(DbBackend::Sql),                                              // %2  - DocuValue
                    docuRefTable.name(DbBackend::Sql),                                                // %3  - DocuRef
                    docuValueTable.fieldName(fieldNum(DocuValue, Type), DbBackend::Sql),              // %4  - DocuValue.Type
                    docuValueTable.fieldName(fieldNum(DocuValue, RecId), DbBackend::Sql),             // %5  - DocuValue.RecId
                    docuValueTable.fieldName(fieldNum(DocuValue, File), DbBackend::Sql),              // %6  - DocuValue.File
                    docuRefTable.fieldName(fieldNum(DocuRef, ValueRecId), DbBackend::Sql),            // %7  - DocuRef.ValueRecId
                    myMaxRowsToUpdatePerStatement,                                                    // %8  - max rows to update per statement
                    docuValueTable.fieldName(fieldNum(DocuValue, Path), DbBackend::Sql),              // %9  - DocuValue.Path
                    docuValueTable.fieldName(fieldNum(DocuValue, StorageProviderId), DbBackend::Sql), // %10 - DocuValue.StorageProviderId
                    docuValueTable.fieldName(fieldNum(DocuValue, AccessInformation), DbBackend::Sql), // %11 - DocuValue.AccessInformation
                    myPart1,                                                                          // %12 - https://myCompany.sharepoint.com/sites/D365FOFileShare/Legacy/
                    myPart2,                                                                          // %13 - .../_api/Web/GetFileByServerRelativePath(decodedurl='/sites/D365FOFileShare/Legacy/
                    myPart3,                                                                          // %14 - ')
                    myPart4,                                                                          // %15 - /
                    myPart5,                                                                          // %16 - .
                    docuRefTable.fieldName(fieldNum(DocuRef, TypeId), DbBackend::Sql),                // %17 - DocuRef.TypeId
                    docuRefTable.fieldName(fieldNum(DocuRef, ActualCompanyId), DbBackend::Sql),       // %18 - DocuRef.ActualCompanyId
                    docuValueTable.fieldName(fieldNum(DocuValue, FileName), DbBackend::Sql),          // %19 - DocuValue.FileName
                    docuValueTable.fieldName(fieldNum(DocuValue, FileType), DbBackend::Sql),          // %20 - DocuValue.FileType
                    myLegacy                                                                          // %21 - 'Legacy'
                    );

                impactedRows = this.myExecuteSQL(sqlQuery, connection);
            }
            while (impactedRows == myMaxRowsToUpdatePerStatement);

            impactedRows = 0;

            // Now update all premarked DocuValue records with the new paths and unmark them.
            // Resulting path: myPart1 + dr.ACTUALCOMPANYID + myPart4 + dv.FILENAME + myPart5 + dv.FILETYPE
            do
            {
                sqlQuery = strFmt(@"
                    UPDATE TOP (%8) docValue
                    SET docValue.%6  = NULL,
                        docValue.%4  = 0,
                        docValue.%10 = 2,
                        docValue.%9  = '%12' + docRef.%18 + '%15' + docValue.%19 + '%16' + docValue.%20,
                        docValue.%11 = '%13' + docRef.%18 + '%15' + docValue.%19 + '%16' + docValue.%20 + '%14'
                    FROM %2 docValue
                    JOIN %3 docRef ON docRef.%7 = docValue.%5
                        AND docValue.%6 = CAST('%1' AS VARBINARY)",
                    myUpgradeModifiedUser,                                                            // %1  - upgrade modified user
                    docuValueTable.name(DbBackend::Sql),                                              // %2  - DocuValue
                    docuRefTable.name(DbBackend::Sql),                                                // %3  - DocuRef
                    docuValueTable.fieldName(fieldNum(DocuValue, Type), DbBackend::Sql),              // %4  - DocuValue.Type
                    docuValueTable.fieldName(fieldNum(DocuValue, RecId), DbBackend::Sql),             // %5  - DocuValue.RecId
                    docuValueTable.fieldName(fieldNum(DocuValue, File), DbBackend::Sql),              // %6  - DocuValue.File
                    docuRefTable.fieldName(fieldNum(DocuRef, ValueRecId), DbBackend::Sql),            // %7  - DocuRef.ValueRecId
                    myMaxRowsToUpdatePerStatement,                                                    // %8  - max rows to update per statement
                    docuValueTable.fieldName(fieldNum(DocuValue, Path), DbBackend::Sql),              // %9  - DocuValue.Path
                    docuValueTable.fieldName(fieldNum(DocuValue, StorageProviderId), DbBackend::Sql), // %10 - DocuValue.StorageProviderId
                    docuValueTable.fieldName(fieldNum(DocuValue, AccessInformation), DbBackend::Sql), // %11 - DocuValue.AccessInformation
                    myPart1,                                                                          // %12 - https://myCompany.sharepoint.com/sites/D365FOFileShare/Legacy/
                    myPart2,                                                                          // %13 - .../_api/Web/GetFileByServerRelativePath(decodedurl='/sites/D365FOFileShare/Legacy/
                    myPart3,                                                                          // %14 - ')
                    myPart4,                                                                          // %15 - /
                    myPart5,                                                                          // %16 - .
                    docuRefTable.fieldName(fieldNum(DocuRef, TypeId), DbBackend::Sql),                // %17 - DocuRef.TypeId
                    docuRefTable.fieldName(fieldNum(DocuRef, ActualCompanyId), DbBackend::Sql),       // %18 - DocuRef.ActualCompanyId
                    docuValueTable.fieldName(fieldNum(DocuValue, FileName), DbBackend::Sql),          // %19 - DocuValue.FileName
                    docuValueTable.fieldName(fieldNum(DocuValue, FileType), DbBackend::Sql),          // %20 - DocuValue.FileType
                    myLegacy                                                                          // %21 - 'Legacy'
                    );

                impactedRows = this.myExecuteSQL(sqlQuery, connection);
            }
            while (impactedRows == myMaxRowsToUpdatePerStatement);
        }
        finally
        {
            connection.finalize();
        }
    }

    /// <summary>
    /// Updates document reference and value records to handle file storage in the cloud.
    /// </summary>
    [
        UpgradeScriptDescription("Updates document value records to handle file storage in the cloud"),
        UpgradeScriptStage(ReleaseUpdateScriptStage::PostSync),
        UpgradeScriptType(ReleaseUpdateScriptType::PartitionScript),
        UpgradeScriptTable(tableStr(DocuRef), false, true, true, false),
        UpgradeScriptTable(tableStr(DocuValue), false, true, true, true)
    ]
    public void updateDocuValue_CorrectFileLocations()
    {
        this.myPreUpdateDocuValue_CorrectFileLocations();

        next updateDocuValue_CorrectFileLocations();

        this.myPostUpdateDocuValue_CorrectFileLocations();
    }

    /// <summary>
    /// Updates document type records to handle file storage in the cloud.
    /// </summary>
    [
        UpgradeScriptDescription("Updates document type records to handle file storage in the cloud"),
        UpgradeScriptStage(ReleaseUpdateScriptStage::PostSync),
        UpgradeScriptType(ReleaseUpdateScriptType::PartitionScript),
        UpgradeDependsOnTaskAttribute(methodStr(ReleaseUpdateDB72_Docu, updateDocuValue_CorrectFileLocations)),
        UpgradeScriptTable(tableStr(DocuType), false, true, true, false)
    ]
    public void updateDocuType_CorrectFilePlacement()
    {
        next updateDocuType_CorrectFilePlacement();

        this.myCreateNewDocuType();
    }

    /// <summary>
    /// Executes the provided SQL statement.
    /// </summary>
    /// <param name="_sqlStatement">The SQL statement to execute.</param>
    /// <param name="_connection">The SQL connection to use; otherwise a new connection will be created.</param>
    /// <returns>The number of rows impacted by the statement.</returns>
    private int myExecuteSQL(str _sqlStatement, Connection _connection = null)
    {
        Connection sessionConn = _connection ? _connection : new Connection();

        try
        {
            Statement statement = sessionConn.createStatement();

            new SqlStatementExecutePermission(_sqlStatement).assert();

            int impactedRows = statement.executeUpdate(_sqlStatement);

            statement.close();
            CodeAccessPermission::revertAssert();

            return impactedRows;
        }
        finally
        {
            if (!_connection)
            {
                sessionConn.finalize();
            }
        }
    }

    /// <summary>
    /// Gets a Set of all legal entities present in the staging data.
    /// </summary>
    /// <returns>The set of company identifiers found in DocuRef.</returns>
    public Set getCompanySet()
    {
        DocuRef docuRef;
        Set     companySet = new Set(Types::String);

        while select ActualCompanyId from docuRef
            group by ActualCompanyId
        {
            companySet.add(docuRef.ActualCompanyId);
        }

        return companySet;
    }

    /// <summary>
    /// Creates new DocuType records for the legacy attachments now moved to SharePoint.
    /// </summary>
    private void myCreateNewDocuType()
    {
        Set           companySet = this.getCompanySet();
        SetEnumerator se         = companySet.getEnumerator();
        DocuType      documentType;

        ttsbegin;

        while (se.moveNext())
        {
            SelectableDataArea currCompany = se.current();

            changecompany (currCompany)
            {
                DocuTypeId typeId = myLegacy + '_' + currCompany;

                if (!DocuType::exist(typeId))
                {
                    documentType.clear();
                    documentType.TypeGroup               = DocuTypeGroup::File;
                    documentType.RemoveOption            = DocuRemoveOption::DocumentAndFile;
                    documentType.FileRemovalConfirmation = NoYes::Yes;
                    documentType.TypeId                  = typeId;
                    documentType.ActionClassId           = myActionClassId; // DocuActionURLClassId
                    documentType.Name                    = myName + currCompany;
                    documentType.FilePlace               = DocuFilePlace::SharePoint;
                    documentType.Host                    = myHost;
                    documentType.Site                    = mySite;
                    documentType.FolderPath              = myLegacy + '/' + currCompany;
                    documentType.doInsert();
                }
            }
        }

        ttscommit;
    }
}
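After the upgrade, it is worth sanity-checking the result. The following is a minimal sketch of a D365FO runnable class (the class name is hypothetical); it counts the DocuValue records that the extension above re-targeted to SharePoint, relying on the StorageProviderId value of 2 and the URL prefix set by the script.

// Hypothetical post-upgrade check: counts the attachments re-targeted to SharePoint
// by the extension above. Run it as a runnable (job-like) class in D365FO.
internal final class myVerifyLegacyAttachments
{
    public static void main(Args _args)
    {
        DocuValue docuValue;

        select count(RecId) from docuValue
            where docuValue.StorageProviderId == 2 // SharePoint provider id, as set by the script
               && docuValue.Path like 'https://myCompany.sharepoint.com/sites/D365FOFileShare/Legacy/*';

        info(strFmt("Legacy attachments re-targeted to SharePoint: %1", docuValue.RecId));
    }
}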