French cheque when printing an invoice: cancel the validation of the ledger journal

You may need to print a cheque for a French customer, BUT when you set the parameters to do that, a new ledger journal is created AND validated!

You may not want this behaviour.

To stop this process, you must modify the class CustVendPaymInvoiceWithJournal: create a new method and modify the run() method.

void myDeleteJournal()
{
    LedgerJournalTable      ledgerJournalTableToDelete;
    LedgerJournalTrans      ledgerJournalTrans;
    ;
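    //  ledgerJournalTable is assumed to be a member variable of CustVendPaymInvoiceWithJournal,
    //  holding the journal created by this.createJournal() in run()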

    ttsbegin;

    //  Change status of line
    while select forupdate ledgerJournalTrans
    where ledgerJournalTrans.JournalNum     == ledgerJournalTable.JournalNum
    {
        ledgerJournalTrans.PaymentStatus = CustVendPaymStatus::None;
        ledgerJournalTrans.update();
    }

    //  Delete the lines
    while select forupdate ledgerJournalTrans
    where ledgerJournalTrans.JournalNum     == ledgerJournalTable.JournalNum
    {
        ledgerJournalTrans.delete();
    }

    //  Delete the journal
    select forupdate firstonly ledgerJournalTableToDelete
    where ledgerJournalTableToDelete.JournalNum == ledgerJournalTable.JournalNum;

    ledgerJournalTableToDelete.delete();

    ttscommit;
}
void run()
{
    setprefix("@SYS17561");

    try
    {
        if (!paymModeTable.PaymJournalNameId)
            throw error("@SYS16764");

        ttsbegin;
        this.createJournal();
        this.createPayment();
        ttscommit;
        this.generatePayment();
// MY NEW CODE - BEGIN
//        this.postJournal();
        switch (this.parmCustVendInvoiceJour().module())
        {
            case SysModule::Cust, SysModule::Project :
                    this.myDeleteJournal();
                    break;

            default :
                    this.postJournal();
                    break;
        }
// MY NEW CODE - END
    }

    catch (Exception::Error)
    {
        info ("@SYS83591");
    }
}

When an object is locked by someone else

Tip available in v4

You must go to the menu: Tools / Development tools / Application objects / “Locked objects” (I’m not sure of the exact labels).

Tip available in v4 and v5

Every locked record, form or table can be found in this table: UtilLocks.

So we can also lock the objects we want … or do it through a method call (for example, on a form: FormRun.lock();).
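
As a quick illustration, here is a minimal sketch of a job that lists the rows of that table. It assumes UtilLocks can be read from X++ as described above, and it only calls caption() on the buffer so that no field names have to be guessed:

static void listLockedObjects(Args _args)
{
    UtilLocks   utilLocks;  //  table named in the tip above; its layout may differ between versions
    ;

    while select utilLocks
    {
        //  caption() exists on any table buffer, so no specific fields are assumed here
        info(utilLocks.caption());
    }
}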

Data migration when tableIds and fieldIds have changed

You are about to perform a data migration, but some Ids have changed!

Here are two jobs to help you!

The first one exports all Ids higher than 30000 (for example) to a CSV file.

static void AX2009_GenerateCSVFromIds(Args _args)
{
    #AviFiles
    #File
    #define.ID_MAX          (60000)
    #define.ID_MIN          (30000)

    #define.POS_TABLE_ID    (1)
    #define.POS_TABLE_NAME  (2)
    #define.POS_FIELD_ID    (3)
    #define.POS_FIELD_NAME  (4)

    int                     idx             = 1;
    int                     fieldIdx;
    container               value;
    FileName                fileName        = "H:" + #FilePathDelimiter + "export_id_v5.csv";
    FieldId                 fieldId;

    SysDictTable            sysDictTable;
    SysDictField            sysDictField;

    AsciiIO                 file;

    SysOperationProgress    showProgress;
    FileIOPermission        permission;
    ;

    permission      = new FileIOPermission(fileName, #io_write);
    permission.assert();

    file            = new AsciiIO(fileName, #io_write);

    if (!file)
    {
        throw error("Can't create file");
    }

    if (file.status() != IO_Status::Ok)
    {
        throw error("Bad status");
    }

    file.outFieldDelimiter(";");

    showProgress    = new SysOperationProgress();
    showProgress.setCaption("Export Id");
    showProgress.setAnimation(#AviTransfer);
    showProgress.setTotal(#ID_MAX);

    while (idx < #ID_MAX)
    {
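        //  idx is the candidate table Id being probed by new SysDictTable(idx) below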
        showProgress.setText(strfmt("Process in use"));
        showProgress.incCount();

        sysDictTable    = new SysDictTable(idx);

        if (sysDictTable)
        {
            for (fieldIdx = 1; fieldIdx <= sysDictTable.fieldCnt(); fieldIdx++)
            {
                fieldId      = sysDictTable.fieldCnt2Id(fieldIdx);
                sysDictField = new SysDictField(idx, fieldId);

                //  Export only custom fields (Id >= #ID_MIN) and skip system fields
                if (fieldId >= #ID_MIN && !sysDictField.isSystem())
                {
                    value = connull();
                    value = conins(value, #POS_TABLE_ID, idx);
                    value = conins(value, #POS_TABLE_NAME, sysDictTable.name());
                    value = conins(value, #POS_FIELD_ID, fieldId);
                    value = conins(value, #POS_FIELD_NAME, sysDictField.name());

                    file.write(value);
                }
            }
        }

        idx++;
    }

    file = null;
}

You must execute this job in every environment (v3 and v5, for example)!

Now you must merge the two CSV files and build the correspondence between them (sorry, but you will have to do this part manually ;-p).
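
To give an idea of the expected result, here is a hypothetical line of the merged CSV (the column order matches the #V3_*/#V5_* macros defined in the next job, the delimiter is ';', and the table/field names are made up):

50125;MyCustomTable;50003;MyCustomField;55212;MyCustomTable;50007;MyCustomField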

The second job generates the code to copy-paste into a job. It needs two files: one to write the generated code to, and one with the values (the merged CSV file).

static void AX2009_GenerateJobForUpdateIds(Args _args)
{
    #AviFiles
    #File
    
    #define.ID_MAX              (50000)

    #define.DELIMITER           (';')
    #define.TTSBEGIN            ("ttsBegin;")
    #define.TTSCOMMIT           ("ttsCommit;")
    
    #define.V3_TABLE_ID         (1)
    #define.V3_TABLE_NAME       (2)
    #define.V3_FIELD_ID         (3)
    #define.V3_FIELD_NAME       (4)
    #define.V5_TABLE_ID         (5)
    #define.V5_TABLE_NAME       (6)
    #define.V5_FIELD_ID         (7)
    #define.V5_FIELD_NAME       (8)
    

    int                         idx;

    container                   valueTableChange;
    container                   valueFieldChange;
    container                   valueRead;

    str                         fieldChange     = "ReleaseUpdateDB::changeFieldId(tableNum(%1), %2, %3, tableStr(%4), fieldStr(%4, %5));";
    str                         tableChange     = "ReleaseUpdateDB::changeTableId(%1, %2, tableStr(%3));";

    TableId                     tableId;

    FileName                    fileName        = "H:" + #FilePathDelimiter + "job_convert_id.txt";
    FileName                    fileNameRead    = "H:" + #FilePathDelimiter + "export_merge_id_v3_v5.csv";

    AsciiIO                     file;
    AsciiIO                     fileRead;
    FileIOPermission            permission;
    ;

    permission      = new FileIOPermission(fileName, #io_write);
    permission.assert();


    file            = new AsciiIO(fileName,     #io_write);
    fileRead        = new AsciiIO(fileNameRead, #io_read);

    if (!file)
    {
        throw error("Can't WRITE file");
    }

    if (file.status() != IO_Status::Ok)
    {
        throw error("WRITE : Bad status");
    }
    
    if (!fileRead)
    {
        throw error("Can't READ file");
    }
    
    if (fileRead.status() != IO_Status::Ok)
    {
        throw error("READ : Bad status");
    }

    file.outFieldDelimiter('');
    fileRead.inFieldDelimiter(#DELIMITER);

//    showProgress    = new SysOperationProgress();
//    showProgress.setCaption("Export Id");
//    showProgress.setAnimation(#AviTransfer);
//    showProgress.setTotal(#ID_MAX);

    valueRead = fileRead.read();
    
    while (conLen(valueRead))
    {
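        //  Each CSV record holds one v3/v5 table and field mapping (see the #V3_*/#V5_* macros above)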
        if (conpeek(valueRead, #V3_TABLE_ID) != conpeek(valueRead, #V5_TABLE_ID))
        {
            tableId = conpeek(valueRead, #V3_TABLE_ID);
            
            if (tableId >= #ID_MAX)
            {
                if (!confind(valueTableChange, strfmt(tableChange, conpeek(valueRead, #V3_TABLE_ID), conpeek(valueRead, #V5_TABLE_ID), conpeek(valueRead, #V5_TABLE_NAME))))
                {
                    valueTableChange += strfmt(tableChange, conpeek(valueRead, #V3_TABLE_ID), conpeek(valueRead, #V5_TABLE_ID), conpeek(valueRead, #V5_TABLE_NAME));
                }
            }
        }
        
        if (conpeek(valueRead, #V3_FIELD_ID) != conpeek(valueRead, #V5_FIELD_ID))
        {
            valueFieldChange += strfmt(fieldChange, conpeek(valueRead, #V5_TABLE_NAME),
                                                    conpeek(valueRead, #V3_FIELD_ID), 
                                                    conpeek(valueRead, #V5_FIELD_ID), 
                                                    conpeek(valueRead, #V5_TABLE_NAME),
                                                    conpeek(valueRead, #V5_FIELD_NAME));
        }

        valueRead = fileRead.read();
    }

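    //  Write the generated job body: table Id changes first, then field Id changes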
    file.write(#DELIMITER);
    file.write(#TTSBEGIN);
    file.write("//Table change id");
    
    for (idx = 1; idx <= conlen(valueTableChange); idx++)
    {
        file.write(conpeek(valueTableChange, idx));
    }

    file.write("//Field change id");
    
    for (idx = 1; idx <= conlen(valueFieldChange); idx++)
    {
        file.write(conpeek(valueFieldChange, idx));
    }
    
    file.write(#TTSCOMMIT);

    file        = null;
    fileRead    = null;
}
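
For reference, with the hypothetical CSV line shown earlier, the generated file should contain something like the following (apart from a leading ';' written by the first file.write), ready to be pasted into a job:

ttsBegin;
//Table change id
ReleaseUpdateDB::changeTableId(50125, 55212, tableStr(MyCustomTable));
//Field change id
ReleaseUpdateDB::changeFieldId(tableNum(MyCustomTable), 50003, 50007, tableStr(MyCustomTable), fieldStr(MyCustomTable, MyCustomField));
ttsCommit;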

PAY ATTENTION!

If the names of these tables/fields have changed too, you must use the following variant of the job, where the name passed is the name from the previous version (written as a quoted string instead of a tableStr/fieldStr call, so the generated job still compiles):

static void AX2009_GenerateJobForUpdateIds(Args _args)
{
    #AviFiles
    #File

    #define.ID_MAX              (50000)

    #define.DELIMITER           (';')
    #define.TTSBEGIN            ("ttsBegin;")
    #define.TTSCOMMIT           ("ttsCommit;")

    #define.V3_TABLE_ID         (1)
    #define.V3_TABLE_NAME       (2)
    #define.V3_FIELD_ID         (3)
    #define.V3_FIELD_NAME       (4)
    #define.V5_TABLE_ID         (5)
    #define.V5_TABLE_NAME       (6)
    #define.V5_FIELD_ID         (7)
    #define.V5_FIELD_NAME       (8)


    int                         idx;

    container                   valueTableChange;
    container                   valueFieldChange;
    container                   valueRead;

    //str                         fieldChange     = "ReleaseUpdateDB::changeFieldId(tableNum(%1), %2, %3, tableStr(%4), fieldStr(%4, %5));";
    //str                         tableChange     = "ReleaseUpdateDB::changeTableId(%1, %2, tableStr(%3));";
    str                         fieldChange     = "ReleaseUpdateDB::changeFieldId(%1, %2, %3, '%4', '%5');";
    str                         tableChange     = "ReleaseUpdateDB::changeTableId(%1, %2, '%3');";

    TableId                     tableId;

    FileName                    fileName        = "H:" + #FilePathDelimiter + "DataMigration" + #FilePathDelimiter + "job_convert_id_v3_name.txt";
    FileName                    fileNameRead    = "H:" + #FilePathDelimiter + "DataMigration" + #FilePathDelimiter + "export_merge_id_v3_v5.csv";

    AsciiIO                     file;
    AsciiIO                     fileRead;
    FileIOPermission            permission;
    ;

    permission      = new FileIOPermission(fileName, #io_write);
    permission.assert();


    file            = new AsciiIO(fileName,     #io_write);
    fileRead        = new AsciiIO(fileNameRead, #io_read);

    if (!file)
    {
        throw error("Can't WRITE file");
    }

    if (file.status() != IO_Status::Ok)
    {
        throw error("WRITE : Bad status");
    }

    if (!fileRead)
    {
        throw error("Can't READ file");
    }

    if (fileRead.status() != IO_Status::Ok)
    {
        throw error("READ : Bad status");
    }

    file.outFieldDelimiter('');
    fileRead.inFieldDelimiter(#DELIMITER);

//    showProgress    = new SysOperationProgress();
//    showProgress.setCaption("Export Id");
//    showProgress.setAnimation(#AviTransfer);
//    showProgress.setTotal(#ID_MAX);

    valueRead = fileRead.read();

    while (conLen(valueRead))
    {
        if (conpeek(valueRead, #V3_TABLE_ID) != conpeek(valueRead, #V5_TABLE_ID))
        {
            tableId = conpeek(valueRead, #V3_TABLE_ID);

            if (tableId >= #ID_MAX)
            {
                if (!confind(valueTableChange, strfmt(tableChange, conpeek(valueRead, #V3_TABLE_ID), conpeek(valueRead, #V5_TABLE_ID), conpeek(valueRead, #V3_TABLE_NAME)))) //#V5_TABLE_NAME))))
                {
                    valueTableChange += strfmt(tableChange, conpeek(valueRead, #V3_TABLE_ID), conpeek(valueRead, #V5_TABLE_ID), conpeek(valueRead, #V3_TABLE_NAME)); //#V5_TABLE_NAME));
                }
            }
        }

        if (conpeek(valueRead, #V3_FIELD_ID) != conpeek(valueRead, #V5_FIELD_ID))
        {
            valueFieldChange += strfmt(fieldChange, conpeek(valueRead, #V5_TABLE_ID), //#V5_TABLE_NAME),
                                                    conpeek(valueRead, #V3_FIELD_ID),
                                                    conpeek(valueRead, #V5_FIELD_ID),
                                                    conpeek(valueRead, #V3_TABLE_NAME), //#V5_TABLE_NAME),
                                                    conpeek(valueRead, #V3_FIELD_NAME)); //#V5_FIELD_NAME));
        }

        valueRead = fileRead.read();
    }

    file.write(#DELIMITER);
    file.write('\n');
    file.write(#TTSBEGIN);
    file.write("//Table change id");

    for (idx = 1; idx <= conlen(valueTableChange); idx++)
    {
        file.write(conpeek(valueTableChange, idx));
    }

    file.write("//Field change id");

    for (idx = 1; idx <= conlen(valueFieldChange); idx++)
    {
        file.write(conpeek(valueFieldChange, idx));
    }

    file.write(#TTSCOMMIT);

    file        = null;
    fileRead    = null;
}