shell, ksh

Avoiding duplication in the script


The code below appears twice in the full script. How can I avoid the duplication?

        if judge_expr ; then            # placeholder for the condition, which differs between the two copies
            (( retry_cnt += 1 ))
            echo $CS2
            echo "reading CS2 checking failed. retry, retry_cnt=$retry_cnt"
            if [[ $retry_cnt -gt $max_retry_cnt ]]; then
                echo "$RED retry too many times, failed to write 0xe2 $bytes 2 $filename $RESET"
                break;
            fi
            sleep 0.5
            continue;
        fi

Here is the full script:

# Loop through each filename in sorted_files and bulk-write it, retrying on failure
set -- $sorted_files
for filename; do
    addr=$(echo "$filename" | cut -d_ -f1)
    bytes=$(echo "$filename" | cut -d_ -f2 | sed -e 's/B.dat//' -e 's/KB.dat//')
    
    retry_cnt=0;
    while true; 
    do
        echo "bulk writing 0xe2 $bytes 2 $filename"
        ccidbgr /dev/cci2 0x6d bulkWrite 0xe2 $bytes 2 $filename
        sleep 0.5
        echo "reading CS2"
        CS2=$(ccidbgr /dev/cci2 0x6d read 0xF2 2>&1)
        if [[ $? -ne 0 ]]; then
            (( retry_cnt += 1 ))
            echo $CS2
            echo "reading CS2 checking failed. retry, retry_cnt=$retry_cnt"
            if [[ $retry_cnt -gt $max_retry_cnt ]]; then
                echo "$RED retry too many times, failed to write 0xe2 $bytes 2 $filename $RESET"
                break;
            fi
            sleep 0.5
            continue;
        fi
        
        echo $CS2
        CS2=$(echo $CS2 | cut -dx -f3)
        if [[ $CS2 -eq 0 ]]; then
            (( retry_cnt += 1 ))
            echo "read CS2 is zero. retry, retry_cnt=$retry_cnt"
            if [[ $retry_cnt -gt $max_retry_cnt ]]; then
                echo "$RED retry too many times, failed to write 0xe2 $bytes 2 $filename $RESET"
                break;
            fi
            sleep 0.5
            continue;
        fi
    done
done


Solution

  • For this particular case I'd probably merge the two (almost) duplicate blocks of code into a single block, using a few new variables to help with logic flow.

    One idea:

    set -- $sorted_files
    for filename; do
        addr=$(echo "$filename" | cut -d_ -f1)
        bytes=$(echo "$filename" | cut -d_ -f2 | sed -e 's/B.dat//' -e 's/KB.dat//')
        
        retry_cnt=0;
        while true; 
        do
            echo "bulk writing 0xe2 $bytes 2 $filename"
            ccidbgr /dev/cci2 0x6d bulkWrite 0xe2 $bytes 2 $filename
            sleep 0.5
            echo "reading CS2"
            CS2=$(ccidbgr /dev/cci2 0x6d read 0xF2 2>&1)
    
            ###################################### new/replacement code (start)
    
            rc=$?                                   # save return code
            IFS='x' read -ra cval <<< "$CS2"        # split CS2 on delimiter "x" into array cval[]
            msg=""                                  # clear msg
    
            # test two new vars (rc, cval[2])
    
            [[ "$rc"        -ne 0 ]] && { echo "$CS2"; msg="reading CS2 checking failed. retry, retry_cnt=$retry_cnt"; }
            [[ "${cval[2]}" -eq 0 ]] && {              msg="read CS2 is zero. retry, retry_cnt=$retry_cnt"           ; }
    
            if [[ -n "$msg" ]]                      # if msg is not empty then one of the 2 tests was 'true' so ...
            then
                (( retry_cnt++ ))
                echo "$msg"
                if [[ $retry_cnt -gt $max_retry_cnt ]]; then
                    echo "$RED retry too many times, failed to write 0xe2 $bytes 2 $filename $RESET"
                    break;
                fi
                sleep 0.5
                continue;
            fi
    
            ###################################### new/replacement code (end)
    
        done
    done
    

    NOTES: I don't have the environment to put this code through a test run, but shellcheck.net doesn't report any syntax errors.
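
    As a standalone illustration of the read -ra splitting (the sample CS2 string below is made up; the real ccidbgr output format may differ), this shows that cval[2] holds the third x-delimited field, i.e. the same field the original cut -dx -f3 extracted:

        # hypothetical CS2 output, for demonstration only; the real ccidbgr format may differ
        CS2='read reg 0xF2 = 0x42'

        IFS='x' read -ra cval <<< "$CS2"   # split on "x": cval[0]="read reg 0", cval[1]="F2 = 0", cval[2]="42"

        echo "${cval[2]}"                  # -> 42
        echo "$CS2" | cut -dx -f3          # -> 42 (same field the original script extracted)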