  • cocos2d-x 3.4: saving a JSON file

    Header files:

    #include "json/document.h"
    #include "json/stringbuffer.h"
    #include "json/writer.h"

    Code:

    void UserManage::SaveUserToFile()
    {
        do 
        {
            CC_BREAK_IF(_userMap.size()==0);
    
            // Build a JSON document whose root object holds an array of users
            rapidjson::Document doc;
            doc.SetObject();
            rapidjson::Document::AllocatorType& allocator = doc.GetAllocator();
            rapidjson::Value uList(rapidjson::kArrayType);
    
            // Serialize each cached user as a JSON object and append it to the array
            UserMap::iterator it;
            for (it = _userMap.begin(); it != _userMap.end(); ++it)
            {
                User *pUser=it->second;
                if (pUser)
                {
                    rapidjson::Value uValue(rapidjson::kObjectType);
                    uValue.AddMember("UserId",pUser->_userId,allocator);
                    // string values passed via c_str() are stored by reference, not copied,
                    // so the User objects must stay alive until doc.Accept(writer) below
                    uValue.AddMember("AccountName",pUser->_accountName.c_str(),allocator);
                    uValue.AddMember("Password",pUser->_password.c_str(),allocator);
                    uValue.AddMember("RegisterTime",pUser->_registerTime,allocator);
                    uValue.AddMember("VipMoney",pUser->_vipMoney,allocator);
                    uValue.AddMember("VipLevel",pUser->_vipLevel,allocator);
                    uValue.AddMember("RoleId",pUser->_roleId,allocator);
    
                    uList.PushBack(uValue,allocator);
                }
            }
            // Attach the user array to the document root; without this the file
            // would only contain an empty object
            doc.AddMember("Users", uList, allocator);

            // Serialize the document into an in-memory string buffer
            rapidjson::StringBuffer buff;
            rapidjson::Writer<rapidjson::StringBuffer> writer(buff);
            doc.Accept(writer);
    
            // Write the JSON string to the user config file
            FILE* pFile = fopen(_userCfgPath.c_str(), "wb");
            if (pFile)
            {
                fputs(buff.GetString(), pFile);
                fclose(pFile);
            }
            else
            {
                CCLOG("UserManage::SaveUserToFile error! cannot open %s", _userCfgPath.c_str());
            }
    
        } while (0);
    
    }
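
    The resulting file contains a single object of the form {"Users":[{"UserId":...,"AccountName":...,...},...]}. For completeness, below is a minimal sketch of the reverse direction: parsing that file back into the user map. It is an assumption-based illustration, not part of the original post. FileUtils::getStringFromFile is the standard cocos2d-x way to read a file into a string, while the User constructor, the exact field types, and the map key are hypothetical and should be adapted to the real User class.

    void UserManage::LoadUserFromFile()
    {
        // read the whole file into a string (cocos2d-x FileUtils)
        std::string content = cocos2d::FileUtils::getInstance()->getStringFromFile(_userCfgPath);
        if (content.empty())
        {
            CCLOG("UserManage::LoadUserFromFile error! cannot read %s", _userCfgPath.c_str());
            return;
        }

        rapidjson::Document doc;
        doc.Parse<0>(content.c_str());
        if (doc.HasParseError() || !doc.IsObject() || !doc.HasMember("Users"))
        {
            CCLOG("UserManage::LoadUserFromFile error! invalid json in %s", _userCfgPath.c_str());
            return;
        }

        const rapidjson::Value& uList = doc["Users"];
        for (rapidjson::SizeType i = 0; i < uList.Size(); ++i)
        {
            const rapidjson::Value& uValue = uList[i];
            User* pUser = new User();                        // hypothetical: assumes a default constructor
            pUser->_userId      = uValue["UserId"].GetInt(); // assumes the numeric/string types implied by SaveUserToFile()
            pUser->_accountName = uValue["AccountName"].GetString();
            pUser->_password    = uValue["Password"].GetString();
            // ...restore the remaining fields the same way they were written above
            _userMap[pUser->_userId] = pUser;                // hypothetical: map keyed by user id
        }
    }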
  • Original article: https://www.cnblogs.com/gamesky/p/4281453.html